1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::FakeFs;
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
45 LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
46 ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
47 language_settings::{LanguageSettingsContent, language_settings},
48 markdown_lang, rust_lang, tree_sitter_typescript,
49};
50use lsp::{
51 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
52 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
53 Uri, WillRenameFiles, notification::DidRenameFiles,
54};
55use parking_lot::Mutex;
56use paths::{config_dir, global_gitignore_path, tasks_file};
57use postage::stream::Stream as _;
58use pretty_assertions::{assert_eq, assert_matches};
59use project::{
60 Event, TaskContexts,
61 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
62 search::{SearchQuery, SearchResult},
63 task_store::{TaskSettingsLocation, TaskStore},
64 *,
65};
66use rand::{Rng as _, rngs::StdRng};
67use serde_json::json;
68use settings::SettingsStore;
69#[cfg(not(windows))]
70use std::os;
71use std::{
72 cell::RefCell,
73 env, mem,
74 num::NonZeroU32,
75 ops::Range,
76 path::{Path, PathBuf},
77 rc::Rc,
78 str::FromStr,
79 sync::{Arc, OnceLock},
80 task::Poll,
81 time::Duration,
82};
83use sum_tree::SumTree;
84use task::{ResolvedTask, ShellKind, TaskContext};
85use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
86use unindent::Unindent as _;
87use util::{
88 TryFutureExt as _, assert_set_eq, maybe, path,
89 paths::{PathMatcher, PathStyle},
90 rel_path::{RelPath, rel_path},
91 test::{TempTree, marked_text_offsets},
92 uri,
93};
94use worktree::WorktreeModelHandle as _;
95
96#[gpui::test]
97async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
98 cx.executor().allow_parking();
99
100 let (tx, mut rx) = futures::channel::mpsc::unbounded();
101 let _thread = std::thread::spawn(move || {
102 #[cfg(not(target_os = "windows"))]
103 std::fs::metadata("/tmp").unwrap();
104 #[cfg(target_os = "windows")]
105 std::fs::metadata("C:/Windows").unwrap();
106 std::thread::sleep(Duration::from_millis(1000));
107 tx.unbounded_send(1).unwrap();
108 });
109 rx.next().await.unwrap();
110}
111
112#[gpui::test]
113async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
114 cx.executor().allow_parking();
115
116 let io_task = smol::unblock(move || {
117 println!("sleeping on thread {:?}", std::thread::current().id());
118 std::thread::sleep(Duration::from_millis(10));
119 1
120 });
121
122 let task = cx.foreground_executor().spawn(async move {
123 io_task.await;
124 });
125
126 task.await;
127}
128
129// NOTE:
130// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
131// we assume that they are not supported out of the box.
132#[cfg(not(windows))]
133#[gpui::test]
134async fn test_symlinks(cx: &mut gpui::TestAppContext) {
135 init_test(cx);
136 cx.executor().allow_parking();
137
138 let dir = TempTree::new(json!({
139 "root": {
140 "apple": "",
141 "banana": {
142 "carrot": {
143 "date": "",
144 "endive": "",
145 }
146 },
147 "fennel": {
148 "grape": "",
149 }
150 }
151 }));
152
153 let root_link_path = dir.path().join("root_link");
154 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
155 os::unix::fs::symlink(
156 dir.path().join("root/fennel"),
157 dir.path().join("root/finnochio"),
158 )
159 .unwrap();
160
161 let project = Project::test(
162 Arc::new(RealFs::new(None, cx.executor())),
163 [root_link_path.as_ref()],
164 cx,
165 )
166 .await;
167
168 project.update(cx, |project, cx| {
169 let tree = project.worktrees(cx).next().unwrap().read(cx);
170 assert_eq!(tree.file_count(), 5);
171 assert_eq!(
172 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
173 tree.entry_for_path(rel_path("finnochio/grape"))
174 .unwrap()
175 .inode
176 );
177 });
178}
179
// Checks .editorconfig support end-to-end: .editorconfig values override
// .zed/settings.json, nested .editorconfig files override the root one,
// `tab_width` is consulted when `indent_size` is absent, and an `off` value
// falls back to the Zed settings.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A temp tree with a root .editorconfig, Zed project settings, and two
    // subdirectories carrying their own .editorconfig overrides.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "d": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 1
            "#,
            "d.rs": "fn d() {\n D\n}",
        },
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the project watches it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_d = settings_for("d/d.rs");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in subdirectory overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
        assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by the .editorconfig glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
287
288#[gpui::test]
289async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
290 init_test(cx);
291
292 let fs = FakeFs::new(cx.executor());
293 fs.insert_tree(
294 path!("/grandparent"),
295 json!({
296 ".editorconfig": "[*]\nindent_size = 4\n",
297 "parent": {
298 ".editorconfig": "[*.rs]\nindent_size = 2\n",
299 "worktree": {
300 ".editorconfig": "[*.md]\nindent_size = 3\n",
301 "main.rs": "fn main() {}",
302 "README.md": "# README",
303 "other.txt": "other content",
304 }
305 }
306 }),
307 )
308 .await;
309
310 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
311
312 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
313 language_registry.add(rust_lang());
314 language_registry.add(markdown_lang());
315
316 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
317
318 cx.executor().run_until_parked();
319
320 cx.update(|cx| {
321 let tree = worktree.read(cx);
322 let settings_for = |path: &str| {
323 let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
324 let file = File::for_entry(file_entry, worktree.clone());
325 let file_language = project
326 .read(cx)
327 .languages()
328 .load_language_for_file_path(file.path.as_std_path());
329 let file_language = cx
330 .foreground_executor()
331 .block_on(file_language)
332 .expect("Failed to get file language");
333 let file = file as _;
334 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
335 };
336
337 let settings_rs = settings_for("main.rs");
338 let settings_md = settings_for("README.md");
339 let settings_txt = settings_for("other.txt");
340
341 // main.rs gets indent_size = 2 from parent's external .editorconfig
342 assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));
343
344 // README.md gets indent_size = 3 from internal worktree .editorconfig
345 assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));
346
347 // other.txt gets indent_size = 4 from grandparent's external .editorconfig
348 assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
349 });
350}
351
352#[gpui::test]
353async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
354 init_test(cx);
355
356 let fs = FakeFs::new(cx.executor());
357 fs.insert_tree(
358 path!("/worktree"),
359 json!({
360 ".editorconfig": "[*]\nindent_size = 99\n",
361 "src": {
362 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
363 "file.rs": "fn main() {}",
364 }
365 }),
366 )
367 .await;
368
369 let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;
370
371 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
372 language_registry.add(rust_lang());
373
374 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
375
376 cx.executor().run_until_parked();
377
378 cx.update(|cx| {
379 let tree = worktree.read(cx);
380 let file_entry = tree
381 .entry_for_path(rel_path("src/file.rs"))
382 .unwrap()
383 .clone();
384 let file = File::for_entry(file_entry, worktree.clone());
385 let file_language = project
386 .read(cx)
387 .languages()
388 .load_language_for_file_path(file.path.as_std_path());
389 let file_language = cx
390 .foreground_executor()
391 .block_on(file_language)
392 .expect("Failed to get file language");
393 let file = file as _;
394 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
395
396 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
397 });
398}
399
400#[gpui::test]
401async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
402 init_test(cx);
403
404 let fs = FakeFs::new(cx.executor());
405 fs.insert_tree(
406 path!("/parent"),
407 json!({
408 ".editorconfig": "[*]\nindent_size = 99\n",
409 "worktree": {
410 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
411 "file.rs": "fn main() {}",
412 }
413 }),
414 )
415 .await;
416
417 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
418
419 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
420 language_registry.add(rust_lang());
421
422 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
423
424 cx.executor().run_until_parked();
425
426 cx.update(|cx| {
427 let tree = worktree.read(cx);
428 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
429 let file = File::for_entry(file_entry, worktree.clone());
430 let file_language = project
431 .read(cx)
432 .languages()
433 .load_language_for_file_path(file.path.as_std_path());
434 let file_language = cx
435 .foreground_executor()
436 .block_on(file_language)
437 .expect("Failed to get file language");
438 let file = file as _;
439 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
440
441 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
442 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
443 });
444}
445
446#[gpui::test]
447async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
448 init_test(cx);
449
450 let fs = FakeFs::new(cx.executor());
451 fs.insert_tree(
452 path!("/grandparent"),
453 json!({
454 ".editorconfig": "[*]\nindent_size = 99\n",
455 "parent": {
456 ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
457 "worktree": {
458 "file.rs": "fn main() {}",
459 }
460 }
461 }),
462 )
463 .await;
464
465 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
466
467 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
468 language_registry.add(rust_lang());
469
470 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
471
472 cx.executor().run_until_parked();
473
474 cx.update(|cx| {
475 let tree = worktree.read(cx);
476 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
477 let file = File::for_entry(file_entry, worktree.clone());
478 let file_language = project
479 .read(cx)
480 .languages()
481 .load_language_for_file_path(file.path.as_std_path());
482 let file_language = cx
483 .foreground_executor()
484 .block_on(file_language)
485 .expect("Failed to get file language");
486 let file = file as _;
487 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
488
489 // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
490 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
491 });
492}
493
494#[gpui::test]
495async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
496 init_test(cx);
497
498 let fs = FakeFs::new(cx.executor());
499 fs.insert_tree(
500 path!("/parent"),
501 json!({
502 ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
503 "worktree_a": {
504 "file.rs": "fn a() {}",
505 ".editorconfig": "[*]\ninsert_final_newline = true\n",
506 },
507 "worktree_b": {
508 "file.rs": "fn b() {}",
509 ".editorconfig": "[*]\ninsert_final_newline = false\n",
510 }
511 }),
512 )
513 .await;
514
515 let project = Project::test(
516 fs,
517 [
518 path!("/parent/worktree_a").as_ref(),
519 path!("/parent/worktree_b").as_ref(),
520 ],
521 cx,
522 )
523 .await;
524
525 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
526 language_registry.add(rust_lang());
527
528 cx.executor().run_until_parked();
529
530 cx.update(|cx| {
531 let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
532 assert_eq!(worktrees.len(), 2);
533
534 for worktree in worktrees {
535 let tree = worktree.read(cx);
536 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
537 let file = File::for_entry(file_entry, worktree.clone());
538 let file_language = project
539 .read(cx)
540 .languages()
541 .load_language_for_file_path(file.path.as_std_path());
542 let file_language = cx
543 .foreground_executor()
544 .block_on(file_language)
545 .expect("Failed to get file language");
546 let file = file as _;
547 let settings =
548 language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
549
550 // Both worktrees should get indent_size = 5 from shared parent .editorconfig
551 assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
552 }
553 });
554}
555
556#[gpui::test]
557async fn test_external_editorconfig_not_loaded_without_internal_config(
558 cx: &mut gpui::TestAppContext,
559) {
560 init_test(cx);
561
562 let fs = FakeFs::new(cx.executor());
563 fs.insert_tree(
564 path!("/parent"),
565 json!({
566 ".editorconfig": "[*]\nindent_size = 99\n",
567 "worktree": {
568 "file.rs": "fn main() {}",
569 }
570 }),
571 )
572 .await;
573
574 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
575
576 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
577 language_registry.add(rust_lang());
578
579 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
580
581 cx.executor().run_until_parked();
582
583 cx.update(|cx| {
584 let tree = worktree.read(cx);
585 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
586 let file = File::for_entry(file_entry, worktree.clone());
587 let file_language = project
588 .read(cx)
589 .languages()
590 .load_language_for_file_path(file.path.as_std_path());
591 let file_language = cx
592 .foreground_executor()
593 .block_on(file_language)
594 .expect("Failed to get file language");
595 let file = file as _;
596 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
597
598 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
599 // because without an internal .editorconfig, external configs are not loaded
600 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
601 });
602}
603
604#[gpui::test]
605async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
606 init_test(cx);
607
608 let fs = FakeFs::new(cx.executor());
609 fs.insert_tree(
610 path!("/parent"),
611 json!({
612 ".editorconfig": "[*]\nindent_size = 4\n",
613 "worktree": {
614 ".editorconfig": "[*]\n",
615 "file.rs": "fn main() {}",
616 }
617 }),
618 )
619 .await;
620
621 let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;
622
623 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
624 language_registry.add(rust_lang());
625
626 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
627
628 cx.executor().run_until_parked();
629
630 cx.update(|cx| {
631 let tree = worktree.read(cx);
632 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
633 let file = File::for_entry(file_entry, worktree.clone());
634 let file_language = project
635 .read(cx)
636 .languages()
637 .load_language_for_file_path(file.path.as_std_path());
638 let file_language = cx
639 .foreground_executor()
640 .block_on(file_language)
641 .expect("Failed to get file language");
642 let file = file as _;
643 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
644
645 // Test initial settings: tab_size = 4 from parent's external .editorconfig
646 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
647 });
648
649 fs.atomic_write(
650 PathBuf::from(path!("/parent/.editorconfig")),
651 "[*]\nindent_size = 8\n".to_owned(),
652 )
653 .await
654 .unwrap();
655
656 cx.executor().run_until_parked();
657
658 cx.update(|cx| {
659 let tree = worktree.read(cx);
660 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
661 let file = File::for_entry(file_entry, worktree.clone());
662 let file_language = project
663 .read(cx)
664 .languages()
665 .load_language_for_file_path(file.path.as_std_path());
666 let file_language = cx
667 .foreground_executor()
668 .block_on(file_language)
669 .expect("Failed to get file language");
670 let file = file as _;
671 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
672
673 // Test settings updated: tab_size = 8
674 assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
675 });
676}
677
678#[gpui::test]
679async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
680 init_test(cx);
681
682 let fs = FakeFs::new(cx.executor());
683 fs.insert_tree(
684 path!("/parent"),
685 json!({
686 ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
687 "existing_worktree": {
688 ".editorconfig": "[*]\n",
689 "file.rs": "fn a() {}",
690 },
691 "new_worktree": {
692 ".editorconfig": "[*]\n",
693 "file.rs": "fn b() {}",
694 }
695 }),
696 )
697 .await;
698
699 let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;
700
701 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
702 language_registry.add(rust_lang());
703
704 cx.executor().run_until_parked();
705
706 cx.update(|cx| {
707 let worktree = project.read(cx).worktrees(cx).next().unwrap();
708 let tree = worktree.read(cx);
709 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
710 let file = File::for_entry(file_entry, worktree.clone());
711 let file_language = project
712 .read(cx)
713 .languages()
714 .load_language_for_file_path(file.path.as_std_path());
715 let file_language = cx
716 .foreground_executor()
717 .block_on(file_language)
718 .expect("Failed to get file language");
719 let file = file as _;
720 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
721
722 // Test existing worktree has tab_size = 7
723 assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
724 });
725
726 let (new_worktree, _) = project
727 .update(cx, |project, cx| {
728 project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
729 })
730 .await
731 .unwrap();
732
733 cx.executor().run_until_parked();
734
735 cx.update(|cx| {
736 let tree = new_worktree.read(cx);
737 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
738 let file = File::for_entry(file_entry, new_worktree.clone());
739 let file_language = project
740 .read(cx)
741 .languages()
742 .load_language_for_file_path(file.path.as_std_path());
743 let file_language = cx
744 .foreground_executor()
745 .block_on(file_language)
746 .expect("Failed to get file language");
747 let file = file as _;
748 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
749
750 // Verify new worktree also has tab_size = 7 from shared parent editorconfig
751 assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
752 });
753}
754
755#[gpui::test]
756async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
757 init_test(cx);
758
759 let fs = FakeFs::new(cx.executor());
760 fs.insert_tree(
761 path!("/parent"),
762 json!({
763 ".editorconfig": "[*]\nindent_size = 6\n",
764 "worktree": {
765 ".editorconfig": "[*]\n",
766 "file.rs": "fn main() {}",
767 }
768 }),
769 )
770 .await;
771
772 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
773
774 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
775 language_registry.add(rust_lang());
776
777 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
778 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
779
780 cx.executor().run_until_parked();
781
782 cx.update(|cx| {
783 let store = cx.global::<SettingsStore>();
784 let (worktree_ids, external_paths, watcher_paths) =
785 store.editorconfig_store.read(cx).test_state();
786
787 // Test external config is loaded
788 assert!(worktree_ids.contains(&worktree_id));
789 assert!(!external_paths.is_empty());
790 assert!(!watcher_paths.is_empty());
791 });
792
793 project.update(cx, |project, cx| {
794 project.remove_worktree(worktree_id, cx);
795 });
796
797 cx.executor().run_until_parked();
798
799 cx.update(|cx| {
800 let store = cx.global::<SettingsStore>();
801 let (worktree_ids, external_paths, watcher_paths) =
802 store.editorconfig_store.read(cx).test_state();
803
804 // Test worktree state, external configs, and watchers all removed
805 assert!(!worktree_ids.contains(&worktree_id));
806 assert!(external_paths.is_empty());
807 assert!(watcher_paths.is_empty());
808 });
809}
810
811#[gpui::test]
812async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
813 cx: &mut gpui::TestAppContext,
814) {
815 init_test(cx);
816
817 let fs = FakeFs::new(cx.executor());
818 fs.insert_tree(
819 path!("/parent"),
820 json!({
821 ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
822 "worktree_a": {
823 ".editorconfig": "[*]\n",
824 "file.rs": "fn a() {}",
825 },
826 "worktree_b": {
827 ".editorconfig": "[*]\n",
828 "file.rs": "fn b() {}",
829 }
830 }),
831 )
832 .await;
833
834 let project = Project::test(
835 fs,
836 [
837 path!("/parent/worktree_a").as_ref(),
838 path!("/parent/worktree_b").as_ref(),
839 ],
840 cx,
841 )
842 .await;
843
844 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
845 language_registry.add(rust_lang());
846
847 cx.executor().run_until_parked();
848
849 let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
850 let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
851 assert_eq!(worktrees.len(), 2);
852
853 let worktree_a = &worktrees[0];
854 let worktree_b = &worktrees[1];
855 let worktree_a_id = worktree_a.read(cx).id();
856 let worktree_b_id = worktree_b.read(cx).id();
857 (worktree_a_id, worktree_b.clone(), worktree_b_id)
858 });
859
860 cx.update(|cx| {
861 let store = cx.global::<SettingsStore>();
862 let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();
863
864 // Test both worktrees have settings and share external config
865 assert!(worktree_ids.contains(&worktree_a_id));
866 assert!(worktree_ids.contains(&worktree_b_id));
867 assert_eq!(external_paths.len(), 1); // single shared external config
868 });
869
870 project.update(cx, |project, cx| {
871 project.remove_worktree(worktree_a_id, cx);
872 });
873
874 cx.executor().run_until_parked();
875
876 cx.update(|cx| {
877 let store = cx.global::<SettingsStore>();
878 let (worktree_ids, external_paths, watcher_paths) =
879 store.editorconfig_store.read(cx).test_state();
880
881 // Test worktree_a is gone but external config remains for worktree_b
882 assert!(!worktree_ids.contains(&worktree_a_id));
883 assert!(worktree_ids.contains(&worktree_b_id));
884 // External config should still exist because worktree_b uses it
885 assert_eq!(external_paths.len(), 1);
886 assert_eq!(watcher_paths.len(), 1);
887 });
888
889 cx.update(|cx| {
890 let tree = worktree_b.read(cx);
891 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
892 let file = File::for_entry(file_entry, worktree_b.clone());
893 let file_language = project
894 .read(cx)
895 .languages()
896 .load_language_for_file_path(file.path.as_std_path());
897 let file_language = cx
898 .foreground_executor()
899 .block_on(file_language)
900 .expect("Failed to get file language");
901 let file = file as _;
902 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
903
904 // Test worktree_b still has correct settings
905 assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
906 });
907}
908
909#[gpui::test]
910async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
911 init_test(cx);
912 cx.update(|cx| {
913 GitHostingProviderRegistry::default_global(cx);
914 git_hosting_providers::init(cx);
915 });
916
917 let fs = FakeFs::new(cx.executor());
918 let str_path = path!("/dir");
919 let path = Path::new(str_path);
920
921 fs.insert_tree(
922 path!("/dir"),
923 json!({
924 ".zed": {
925 "settings.json": r#"{
926 "git_hosting_providers": [
927 {
928 "provider": "gitlab",
929 "base_url": "https://google.com",
930 "name": "foo"
931 }
932 ]
933 }"#
934 },
935 }),
936 )
937 .await;
938
939 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
940 let (_worktree, _) =
941 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
942 cx.executor().run_until_parked();
943
944 cx.update(|cx| {
945 let provider = GitHostingProviderRegistry::global(cx);
946 assert!(
947 provider
948 .list_hosting_providers()
949 .into_iter()
950 .any(|provider| provider.name() == "foo")
951 );
952 });
953
954 fs.atomic_write(
955 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
956 "{}".into(),
957 )
958 .await
959 .unwrap();
960
961 cx.run_until_parked();
962
963 cx.update(|cx| {
964 let provider = GitHostingProviderRegistry::global(cx);
965 assert!(
966 !provider
967 .list_hosting_providers()
968 .into_iter()
969 .any(|provider| provider.name() == "foo")
970 );
971 });
972}
973
974#[gpui::test]
975async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
976 init_test(cx);
977 TaskStore::init(None);
978
979 let fs = FakeFs::new(cx.executor());
980 fs.insert_tree(
981 path!("/dir"),
982 json!({
983 ".zed": {
984 "settings.json": r#"{ "tab_size": 8 }"#,
985 "tasks.json": r#"[{
986 "label": "cargo check all",
987 "command": "cargo",
988 "args": ["check", "--all"]
989 },]"#,
990 },
991 "a": {
992 "a.rs": "fn a() {\n A\n}"
993 },
994 "b": {
995 ".zed": {
996 "settings.json": r#"{ "tab_size": 2 }"#,
997 "tasks.json": r#"[{
998 "label": "cargo check",
999 "command": "cargo",
1000 "args": ["check"]
1001 },]"#,
1002 },
1003 "b.rs": "fn b() {\n B\n}"
1004 }
1005 }),
1006 )
1007 .await;
1008
1009 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1010 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1011
1012 cx.executor().run_until_parked();
1013 let worktree_id = cx.update(|cx| {
1014 project.update(cx, |project, cx| {
1015 project.worktrees(cx).next().unwrap().read(cx).id()
1016 })
1017 });
1018
1019 let mut task_contexts = TaskContexts::default();
1020 task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
1021 let task_contexts = Arc::new(task_contexts);
1022
1023 let topmost_local_task_source_kind = TaskSourceKind::Worktree {
1024 id: worktree_id,
1025 directory_in_worktree: rel_path(".zed").into(),
1026 id_base: "local worktree tasks from directory \".zed\"".into(),
1027 };
1028
1029 let all_tasks = cx
1030 .update(|cx| {
1031 let tree = worktree.read(cx);
1032
1033 let file_a = File::for_entry(
1034 tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
1035 worktree.clone(),
1036 ) as _;
1037 let settings_a = language_settings(None, Some(&file_a), cx);
1038 let file_b = File::for_entry(
1039 tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
1040 worktree.clone(),
1041 ) as _;
1042 let settings_b = language_settings(None, Some(&file_b), cx);
1043
1044 assert_eq!(settings_a.tab_size.get(), 8);
1045 assert_eq!(settings_b.tab_size.get(), 2);
1046
1047 get_all_tasks(&project, task_contexts.clone(), cx)
1048 })
1049 .await
1050 .into_iter()
1051 .map(|(source_kind, task)| {
1052 let resolved = task.resolved;
1053 (
1054 source_kind,
1055 task.resolved_label,
1056 resolved.args,
1057 resolved.env,
1058 )
1059 })
1060 .collect::<Vec<_>>();
1061 assert_eq!(
1062 all_tasks,
1063 vec![
1064 (
1065 TaskSourceKind::Worktree {
1066 id: worktree_id,
1067 directory_in_worktree: rel_path("b/.zed").into(),
1068 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1069 },
1070 "cargo check".to_string(),
1071 vec!["check".to_string()],
1072 HashMap::default(),
1073 ),
1074 (
1075 topmost_local_task_source_kind.clone(),
1076 "cargo check all".to_string(),
1077 vec!["check".to_string(), "--all".to_string()],
1078 HashMap::default(),
1079 ),
1080 ]
1081 );
1082
1083 let (_, resolved_task) = cx
1084 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1085 .await
1086 .into_iter()
1087 .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
1088 .expect("should have one global task");
1089 project.update(cx, |project, cx| {
1090 let task_inventory = project
1091 .task_store()
1092 .read(cx)
1093 .task_inventory()
1094 .cloned()
1095 .unwrap();
1096 task_inventory.update(cx, |inventory, _| {
1097 inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
1098 inventory
1099 .update_file_based_tasks(
1100 TaskSettingsLocation::Global(tasks_file()),
1101 Some(
1102 &json!([{
1103 "label": "cargo check unstable",
1104 "command": "cargo",
1105 "args": [
1106 "check",
1107 "--all",
1108 "--all-targets"
1109 ],
1110 "env": {
1111 "RUSTFLAGS": "-Zunstable-options"
1112 }
1113 }])
1114 .to_string(),
1115 ),
1116 )
1117 .unwrap();
1118 });
1119 });
1120 cx.run_until_parked();
1121
1122 let all_tasks = cx
1123 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1124 .await
1125 .into_iter()
1126 .map(|(source_kind, task)| {
1127 let resolved = task.resolved;
1128 (
1129 source_kind,
1130 task.resolved_label,
1131 resolved.args,
1132 resolved.env,
1133 )
1134 })
1135 .collect::<Vec<_>>();
1136 assert_eq!(
1137 all_tasks,
1138 vec![
1139 (
1140 topmost_local_task_source_kind.clone(),
1141 "cargo check all".to_string(),
1142 vec!["check".to_string(), "--all".to_string()],
1143 HashMap::default(),
1144 ),
1145 (
1146 TaskSourceKind::Worktree {
1147 id: worktree_id,
1148 directory_in_worktree: rel_path("b/.zed").into(),
1149 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1150 },
1151 "cargo check".to_string(),
1152 vec!["check".to_string()],
1153 HashMap::default(),
1154 ),
1155 (
1156 TaskSourceKind::AbsPath {
1157 abs_path: paths::tasks_file().clone(),
1158 id_base: "global tasks.json".into(),
1159 },
1160 "cargo check unstable".to_string(),
1161 vec![
1162 "check".to_string(),
1163 "--all".to_string(),
1164 "--all-targets".to_string(),
1165 ],
1166 HashMap::from_iter(Some((
1167 "RUSTFLAGS".to_string(),
1168 "-Zunstable-options".to_string()
1169 ))),
1170 ),
1171 ]
1172 );
1173}
1174
// Regression test: a task template in a worktree's `.zed/tasks.json` that
// references an unknown `$ZED_*` variable should surface an `Event::Toast`
// carrying a link to the tasks documentation.
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we had a chance to setup the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        // Only a toast that carries a documentation link is accepted; any
        // other event is ignored.
        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1232
1233#[gpui::test]
1234async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
1235 init_test(cx);
1236 TaskStore::init(None);
1237
1238 let fs = FakeFs::new(cx.executor());
1239 fs.insert_tree(
1240 path!("/dir"),
1241 json!({
1242 ".zed": {
1243 "tasks.json": r#"[{
1244 "label": "test worktree root",
1245 "command": "echo $ZED_WORKTREE_ROOT"
1246 }]"#,
1247 },
1248 "a": {
1249 "a.rs": "fn a() {\n A\n}"
1250 },
1251 }),
1252 )
1253 .await;
1254
1255 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1256 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1257
1258 cx.executor().run_until_parked();
1259 let worktree_id = cx.update(|cx| {
1260 project.update(cx, |project, cx| {
1261 project.worktrees(cx).next().unwrap().read(cx).id()
1262 })
1263 });
1264
1265 let active_non_worktree_item_tasks = cx
1266 .update(|cx| {
1267 get_all_tasks(
1268 &project,
1269 Arc::new(TaskContexts {
1270 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1271 active_worktree_context: None,
1272 other_worktree_contexts: Vec::new(),
1273 lsp_task_sources: HashMap::default(),
1274 latest_selection: None,
1275 }),
1276 cx,
1277 )
1278 })
1279 .await;
1280 assert!(
1281 active_non_worktree_item_tasks.is_empty(),
1282 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
1283 );
1284
1285 let active_worktree_tasks = cx
1286 .update(|cx| {
1287 get_all_tasks(
1288 &project,
1289 Arc::new(TaskContexts {
1290 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1291 active_worktree_context: Some((worktree_id, {
1292 let mut worktree_context = TaskContext::default();
1293 worktree_context
1294 .task_variables
1295 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
1296 worktree_context
1297 })),
1298 other_worktree_contexts: Vec::new(),
1299 lsp_task_sources: HashMap::default(),
1300 latest_selection: None,
1301 }),
1302 cx,
1303 )
1304 })
1305 .await;
1306 assert_eq!(
1307 active_worktree_tasks
1308 .into_iter()
1309 .map(|(source_kind, task)| {
1310 let resolved = task.resolved;
1311 (source_kind, resolved.command.unwrap())
1312 })
1313 .collect::<Vec<_>>(),
1314 vec![(
1315 TaskSourceKind::Worktree {
1316 id: worktree_id,
1317 directory_in_worktree: rel_path(".zed").into(),
1318 id_base: "local worktree tasks from directory \".zed\"".into(),
1319 },
1320 "echo /dir".to_string(),
1321 )]
1322 );
1323}
1324
// One language-server adapter ("ty") can back multiple concrete server
// instances within a single worktree: subprojects rooted at their own
// `pyproject.toml` first share a server, and activating a distinct
// toolchain for one subproject spawns a dedicated second server.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: a Python project root is the nearest
    // ancestor directory (within `depth`) containing a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two sibling subprojects, each with its own venv directory and manifest.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
            {
                "languages": {
                    "Python": {
                        "language_servers": ["ty"]
                    }
                }
            }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery is rooted at project-b's `pyproject.toml`.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    // Nothing has been activated yet.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // After activation, project-b's buffer is served by a fresh server.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1526
// End-to-end lifecycle test for language-server management: servers start
// lazily per language, receive open/change/save/close notifications only
// for matching buffers, follow file renames (including renames that change
// the buffer's language), and reopen documents after a restart.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared by the cross-language
    // rename below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the
    // replacements come up.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1929
1930#[gpui::test]
1931async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1932 init_test(cx);
1933
1934 let settings_json_contents = json!({
1935 "languages": {
1936 "Rust": {
1937 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1938 }
1939 },
1940 "lsp": {
1941 "my_fake_lsp": {
1942 "binary": {
1943 // file exists, so this is treated as a relative path
1944 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1945 }
1946 },
1947 "lsp_on_path": {
1948 "binary": {
1949 // file doesn't exist, so it will fall back on PATH env var
1950 "path": path!("lsp_on_path.exe").to_string(),
1951 }
1952 }
1953 },
1954 });
1955
1956 let fs = FakeFs::new(cx.executor());
1957 fs.insert_tree(
1958 path!("/the-root"),
1959 json!({
1960 ".zed": {
1961 "settings.json": settings_json_contents.to_string(),
1962 },
1963 ".relative_path": {
1964 "to": {
1965 "my_fake_lsp.exe": "",
1966 },
1967 },
1968 "src": {
1969 "main.rs": "",
1970 }
1971 }),
1972 )
1973 .await;
1974
1975 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1976 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1977 language_registry.add(rust_lang());
1978
1979 let mut my_fake_lsp = language_registry.register_fake_lsp(
1980 "Rust",
1981 FakeLspAdapter {
1982 name: "my_fake_lsp",
1983 ..Default::default()
1984 },
1985 );
1986 let mut lsp_on_path = language_registry.register_fake_lsp(
1987 "Rust",
1988 FakeLspAdapter {
1989 name: "lsp_on_path",
1990 ..Default::default()
1991 },
1992 );
1993
1994 cx.run_until_parked();
1995
1996 // Start the language server by opening a buffer with a compatible file extension.
1997 project
1998 .update(cx, |project, cx| {
1999 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
2000 })
2001 .await
2002 .unwrap();
2003
2004 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
2005 assert_eq!(
2006 lsp_path.to_string_lossy(),
2007 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
2008 );
2009
2010 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
2011 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
2012}
2013
2014#[gpui::test]
2015async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2016 init_test(cx);
2017
2018 let settings_json_contents = json!({
2019 "languages": {
2020 "Rust": {
2021 "language_servers": ["tilde_lsp"]
2022 }
2023 },
2024 "lsp": {
2025 "tilde_lsp": {
2026 "binary": {
2027 "path": "~/.local/bin/rust-analyzer",
2028 }
2029 }
2030 },
2031 });
2032
2033 let fs = FakeFs::new(cx.executor());
2034 fs.insert_tree(
2035 path!("/root"),
2036 json!({
2037 ".zed": {
2038 "settings.json": settings_json_contents.to_string(),
2039 },
2040 "src": {
2041 "main.rs": "fn main() {}",
2042 }
2043 }),
2044 )
2045 .await;
2046
2047 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2048 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2049 language_registry.add(rust_lang());
2050
2051 let mut tilde_lsp = language_registry.register_fake_lsp(
2052 "Rust",
2053 FakeLspAdapter {
2054 name: "tilde_lsp",
2055 ..Default::default()
2056 },
2057 );
2058 cx.run_until_parked();
2059
2060 project
2061 .update(cx, |project, cx| {
2062 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2063 })
2064 .await
2065 .unwrap();
2066
2067 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2068 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2069 assert_eq!(
2070 lsp_path, expected_path,
2071 "Tilde path should expand to home directory"
2072 );
2073}
2074
2075#[gpui::test]
2076async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2077 init_test(cx);
2078
2079 let fs = FakeFs::new(cx.executor());
2080 fs.insert_tree(
2081 path!("/the-root"),
2082 json!({
2083 ".gitignore": "target\n",
2084 "Cargo.lock": "",
2085 "src": {
2086 "a.rs": "",
2087 "b.rs": "",
2088 },
2089 "target": {
2090 "x": {
2091 "out": {
2092 "x.rs": ""
2093 }
2094 },
2095 "y": {
2096 "out": {
2097 "y.rs": "",
2098 }
2099 },
2100 "z": {
2101 "out": {
2102 "z.rs": ""
2103 }
2104 }
2105 }
2106 }),
2107 )
2108 .await;
2109 fs.insert_tree(
2110 path!("/the-registry"),
2111 json!({
2112 "dep1": {
2113 "src": {
2114 "dep1.rs": "",
2115 }
2116 },
2117 "dep2": {
2118 "src": {
2119 "dep2.rs": "",
2120 }
2121 },
2122 }),
2123 )
2124 .await;
2125 fs.insert_tree(
2126 path!("/the/stdlib"),
2127 json!({
2128 "LICENSE": "",
2129 "src": {
2130 "string.rs": "",
2131 }
2132 }),
2133 )
2134 .await;
2135
2136 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2137 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2138 (project.languages().clone(), project.lsp_store())
2139 });
2140 language_registry.add(rust_lang());
2141 let mut fake_servers = language_registry.register_fake_lsp(
2142 "Rust",
2143 FakeLspAdapter {
2144 name: "the-language-server",
2145 ..Default::default()
2146 },
2147 );
2148
2149 cx.executor().run_until_parked();
2150
2151 // Start the language server by opening a buffer with a compatible file extension.
2152 project
2153 .update(cx, |project, cx| {
2154 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2155 })
2156 .await
2157 .unwrap();
2158
2159 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2160 project.update(cx, |project, cx| {
2161 let worktree = project.worktrees(cx).next().unwrap();
2162 assert_eq!(
2163 worktree
2164 .read(cx)
2165 .snapshot()
2166 .entries(true, 0)
2167 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2168 .collect::<Vec<_>>(),
2169 &[
2170 ("", false),
2171 (".gitignore", false),
2172 ("Cargo.lock", false),
2173 ("src", false),
2174 ("src/a.rs", false),
2175 ("src/b.rs", false),
2176 ("target", true),
2177 ]
2178 );
2179 });
2180
2181 let prev_read_dir_count = fs.read_dir_call_count();
2182
2183 let fake_server = fake_servers.next().await.unwrap();
2184 cx.executor().run_until_parked();
2185 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2186 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2187 id
2188 });
2189
2190 // Simulate jumping to a definition in a dependency outside of the worktree.
2191 let _out_of_worktree_buffer = project
2192 .update(cx, |project, cx| {
2193 project.open_local_buffer_via_lsp(
2194 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2195 server_id,
2196 cx,
2197 )
2198 })
2199 .await
2200 .unwrap();
2201
2202 // Keep track of the FS events reported to the language server.
2203 let file_changes = Arc::new(Mutex::new(Vec::new()));
2204 fake_server
2205 .request::<lsp::request::RegisterCapability>(
2206 lsp::RegistrationParams {
2207 registrations: vec![lsp::Registration {
2208 id: Default::default(),
2209 method: "workspace/didChangeWatchedFiles".to_string(),
2210 register_options: serde_json::to_value(
2211 lsp::DidChangeWatchedFilesRegistrationOptions {
2212 watchers: vec![
2213 lsp::FileSystemWatcher {
2214 glob_pattern: lsp::GlobPattern::String(
2215 path!("/the-root/Cargo.toml").to_string(),
2216 ),
2217 kind: None,
2218 },
2219 lsp::FileSystemWatcher {
2220 glob_pattern: lsp::GlobPattern::String(
2221 path!("/the-root/src/*.{rs,c}").to_string(),
2222 ),
2223 kind: None,
2224 },
2225 lsp::FileSystemWatcher {
2226 glob_pattern: lsp::GlobPattern::String(
2227 path!("/the-root/target/y/**/*.rs").to_string(),
2228 ),
2229 kind: None,
2230 },
2231 lsp::FileSystemWatcher {
2232 glob_pattern: lsp::GlobPattern::String(
2233 path!("/the/stdlib/src/**/*.rs").to_string(),
2234 ),
2235 kind: None,
2236 },
2237 lsp::FileSystemWatcher {
2238 glob_pattern: lsp::GlobPattern::String(
2239 path!("**/Cargo.lock").to_string(),
2240 ),
2241 kind: None,
2242 },
2243 ],
2244 },
2245 )
2246 .ok(),
2247 }],
2248 },
2249 DEFAULT_LSP_REQUEST_TIMEOUT,
2250 )
2251 .await
2252 .into_response()
2253 .unwrap();
2254 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2255 let file_changes = file_changes.clone();
2256 move |params, _| {
2257 let mut file_changes = file_changes.lock();
2258 file_changes.extend(params.changes);
2259 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2260 }
2261 });
2262
2263 cx.executor().run_until_parked();
2264 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2265 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2266
2267 let mut new_watched_paths = fs.watched_paths();
2268 new_watched_paths.retain(|path| {
2269 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2270 });
2271 assert_eq!(
2272 &new_watched_paths,
2273 &[
2274 Path::new(path!("/the-root")),
2275 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2276 Path::new(path!("/the/stdlib/src"))
2277 ]
2278 );
2279
2280 // Now the language server has asked us to watch an ignored directory path,
2281 // so we recursively load it.
2282 project.update(cx, |project, cx| {
2283 let worktree = project.visible_worktrees(cx).next().unwrap();
2284 assert_eq!(
2285 worktree
2286 .read(cx)
2287 .snapshot()
2288 .entries(true, 0)
2289 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2290 .collect::<Vec<_>>(),
2291 &[
2292 ("", false),
2293 (".gitignore", false),
2294 ("Cargo.lock", false),
2295 ("src", false),
2296 ("src/a.rs", false),
2297 ("src/b.rs", false),
2298 ("target", true),
2299 ("target/x", true),
2300 ("target/y", true),
2301 ("target/y/out", true),
2302 ("target/y/out/y.rs", true),
2303 ("target/z", true),
2304 ]
2305 );
2306 });
2307
2308 // Perform some file system mutations, two of which match the watched patterns,
2309 // and one of which does not.
2310 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2311 .await
2312 .unwrap();
2313 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2314 .await
2315 .unwrap();
2316 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2317 .await
2318 .unwrap();
2319 fs.create_file(
2320 path!("/the-root/target/x/out/x2.rs").as_ref(),
2321 Default::default(),
2322 )
2323 .await
2324 .unwrap();
2325 fs.create_file(
2326 path!("/the-root/target/y/out/y2.rs").as_ref(),
2327 Default::default(),
2328 )
2329 .await
2330 .unwrap();
2331 fs.save(
2332 path!("/the-root/Cargo.lock").as_ref(),
2333 &"".into(),
2334 Default::default(),
2335 )
2336 .await
2337 .unwrap();
2338 fs.save(
2339 path!("/the-stdlib/LICENSE").as_ref(),
2340 &"".into(),
2341 Default::default(),
2342 )
2343 .await
2344 .unwrap();
2345 fs.save(
2346 path!("/the/stdlib/src/string.rs").as_ref(),
2347 &"".into(),
2348 Default::default(),
2349 )
2350 .await
2351 .unwrap();
2352
2353 // The language server receives events for the FS mutations that match its watch patterns.
2354 cx.executor().run_until_parked();
2355 assert_eq!(
2356 &*file_changes.lock(),
2357 &[
2358 lsp::FileEvent {
2359 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2360 typ: lsp::FileChangeType::CHANGED,
2361 },
2362 lsp::FileEvent {
2363 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2364 typ: lsp::FileChangeType::DELETED,
2365 },
2366 lsp::FileEvent {
2367 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2368 typ: lsp::FileChangeType::CREATED,
2369 },
2370 lsp::FileEvent {
2371 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2372 typ: lsp::FileChangeType::CREATED,
2373 },
2374 lsp::FileEvent {
2375 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2376 typ: lsp::FileChangeType::CHANGED,
2377 },
2378 ]
2379 );
2380}
2381
2382#[gpui::test]
2383async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2384 init_test(cx);
2385
2386 let fs = FakeFs::new(cx.executor());
2387 fs.insert_tree(
2388 path!("/dir"),
2389 json!({
2390 "a.rs": "let a = 1;",
2391 "b.rs": "let b = 2;"
2392 }),
2393 )
2394 .await;
2395
2396 let project = Project::test(
2397 fs,
2398 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2399 cx,
2400 )
2401 .await;
2402 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2403
2404 let buffer_a = project
2405 .update(cx, |project, cx| {
2406 project.open_local_buffer(path!("/dir/a.rs"), cx)
2407 })
2408 .await
2409 .unwrap();
2410 let buffer_b = project
2411 .update(cx, |project, cx| {
2412 project.open_local_buffer(path!("/dir/b.rs"), cx)
2413 })
2414 .await
2415 .unwrap();
2416
2417 lsp_store.update(cx, |lsp_store, cx| {
2418 lsp_store
2419 .update_diagnostics(
2420 LanguageServerId(0),
2421 lsp::PublishDiagnosticsParams {
2422 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2423 version: None,
2424 diagnostics: vec![lsp::Diagnostic {
2425 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2426 severity: Some(lsp::DiagnosticSeverity::ERROR),
2427 message: "error 1".to_string(),
2428 ..Default::default()
2429 }],
2430 },
2431 None,
2432 DiagnosticSourceKind::Pushed,
2433 &[],
2434 cx,
2435 )
2436 .unwrap();
2437 lsp_store
2438 .update_diagnostics(
2439 LanguageServerId(0),
2440 lsp::PublishDiagnosticsParams {
2441 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2442 version: None,
2443 diagnostics: vec![lsp::Diagnostic {
2444 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2445 severity: Some(DiagnosticSeverity::WARNING),
2446 message: "error 2".to_string(),
2447 ..Default::default()
2448 }],
2449 },
2450 None,
2451 DiagnosticSourceKind::Pushed,
2452 &[],
2453 cx,
2454 )
2455 .unwrap();
2456 });
2457
2458 buffer_a.update(cx, |buffer, _| {
2459 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2460 assert_eq!(
2461 chunks
2462 .iter()
2463 .map(|(s, d)| (s.as_str(), *d))
2464 .collect::<Vec<_>>(),
2465 &[
2466 ("let ", None),
2467 ("a", Some(DiagnosticSeverity::ERROR)),
2468 (" = 1;", None),
2469 ]
2470 );
2471 });
2472 buffer_b.update(cx, |buffer, _| {
2473 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2474 assert_eq!(
2475 chunks
2476 .iter()
2477 .map(|(s, d)| (s.as_str(), *d))
2478 .collect::<Vec<_>>(),
2479 &[
2480 ("let ", None),
2481 ("b", Some(DiagnosticSeverity::WARNING)),
2482 (" = 2;", None),
2483 ]
2484 );
2485 });
2486}
2487
2488#[gpui::test]
2489async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2490 init_test(cx);
2491
2492 let fs = FakeFs::new(cx.executor());
2493 fs.insert_tree(
2494 path!("/root"),
2495 json!({
2496 "dir": {
2497 ".git": {
2498 "HEAD": "ref: refs/heads/main",
2499 },
2500 ".gitignore": "b.rs",
2501 "a.rs": "let a = 1;",
2502 "b.rs": "let b = 2;",
2503 },
2504 "other.rs": "let b = c;"
2505 }),
2506 )
2507 .await;
2508
2509 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2510 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2511 let (worktree, _) = project
2512 .update(cx, |project, cx| {
2513 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2514 })
2515 .await
2516 .unwrap();
2517 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2518
2519 let (worktree, _) = project
2520 .update(cx, |project, cx| {
2521 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2522 })
2523 .await
2524 .unwrap();
2525 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2526
2527 let server_id = LanguageServerId(0);
2528 lsp_store.update(cx, |lsp_store, cx| {
2529 lsp_store
2530 .update_diagnostics(
2531 server_id,
2532 lsp::PublishDiagnosticsParams {
2533 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2534 version: None,
2535 diagnostics: vec![lsp::Diagnostic {
2536 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2537 severity: Some(lsp::DiagnosticSeverity::ERROR),
2538 message: "unused variable 'b'".to_string(),
2539 ..Default::default()
2540 }],
2541 },
2542 None,
2543 DiagnosticSourceKind::Pushed,
2544 &[],
2545 cx,
2546 )
2547 .unwrap();
2548 lsp_store
2549 .update_diagnostics(
2550 server_id,
2551 lsp::PublishDiagnosticsParams {
2552 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2553 version: None,
2554 diagnostics: vec![lsp::Diagnostic {
2555 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2556 severity: Some(lsp::DiagnosticSeverity::ERROR),
2557 message: "unknown variable 'c'".to_string(),
2558 ..Default::default()
2559 }],
2560 },
2561 None,
2562 DiagnosticSourceKind::Pushed,
2563 &[],
2564 cx,
2565 )
2566 .unwrap();
2567 });
2568
2569 let main_ignored_buffer = project
2570 .update(cx, |project, cx| {
2571 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2572 })
2573 .await
2574 .unwrap();
2575 main_ignored_buffer.update(cx, |buffer, _| {
2576 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2577 assert_eq!(
2578 chunks
2579 .iter()
2580 .map(|(s, d)| (s.as_str(), *d))
2581 .collect::<Vec<_>>(),
2582 &[
2583 ("let ", None),
2584 ("b", Some(DiagnosticSeverity::ERROR)),
2585 (" = 2;", None),
2586 ],
2587 "Gigitnored buffers should still get in-buffer diagnostics",
2588 );
2589 });
2590 let other_buffer = project
2591 .update(cx, |project, cx| {
2592 project.open_buffer((other_worktree_id, rel_path("")), cx)
2593 })
2594 .await
2595 .unwrap();
2596 other_buffer.update(cx, |buffer, _| {
2597 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2598 assert_eq!(
2599 chunks
2600 .iter()
2601 .map(|(s, d)| (s.as_str(), *d))
2602 .collect::<Vec<_>>(),
2603 &[
2604 ("let b = ", None),
2605 ("c", Some(DiagnosticSeverity::ERROR)),
2606 (";", None),
2607 ],
2608 "Buffers from hidden projects should still get in-buffer diagnostics"
2609 );
2610 });
2611
2612 project.update(cx, |project, cx| {
2613 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2614 assert_eq!(
2615 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2616 vec![(
2617 ProjectPath {
2618 worktree_id: main_worktree_id,
2619 path: rel_path("b.rs").into(),
2620 },
2621 server_id,
2622 DiagnosticSummary {
2623 error_count: 1,
2624 warning_count: 0,
2625 }
2626 )]
2627 );
2628 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2629 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2630 });
2631}
2632
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies that progress notifications using the adapter's
    // disk-based-diagnostics token are surfaced as
    // DiskBasedDiagnosticsStarted/Finished project events, that published
    // diagnostics reach the buffer, and that re-publishing identical
    // (empty) diagnostics does not emit a redundant update event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token is reported as the
    // start of a disk-based diagnostics pass.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic mid-pass emits a DiagnosticsUpdated event
    // for the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the progress token finishes the disk-based diagnostics pass.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The previously published diagnostic is attached to the newly
    // opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No second event: the diagnostics did not actually change.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2768
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restarting a language server while its disk-based diagnostics pass is
    // still in flight must not leave the project stuck in a "diagnostics
    // running" state: the replacement server's progress is tracked under its
    // new id and the old, never-finished pass is discarded.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // NOTE: this shadows the old `fake_server` handle — from here on we talk
    // to the replacement server only.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed and the replacement comes up as id 1.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The buffer is re-registered with the new server before its
    // disk-based diagnostics pass is reported as started.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server id is reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2870
2871#[gpui::test]
2872async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2873 init_test(cx);
2874
2875 let fs = FakeFs::new(cx.executor());
2876 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2877
2878 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2879
2880 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2881 language_registry.add(rust_lang());
2882 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2883
2884 let (buffer, _) = project
2885 .update(cx, |project, cx| {
2886 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2887 })
2888 .await
2889 .unwrap();
2890
2891 // Publish diagnostics
2892 let fake_server = fake_servers.next().await.unwrap();
2893 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2894 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2895 version: None,
2896 diagnostics: vec![lsp::Diagnostic {
2897 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2898 severity: Some(lsp::DiagnosticSeverity::ERROR),
2899 message: "the message".to_string(),
2900 ..Default::default()
2901 }],
2902 });
2903
2904 cx.executor().run_until_parked();
2905 buffer.update(cx, |buffer, _| {
2906 assert_eq!(
2907 buffer
2908 .snapshot()
2909 .diagnostics_in_range::<_, usize>(0..1, false)
2910 .map(|entry| entry.diagnostic.message.clone())
2911 .collect::<Vec<_>>(),
2912 ["the message".to_string()]
2913 );
2914 });
2915 project.update(cx, |project, cx| {
2916 assert_eq!(
2917 project.diagnostic_summary(false, cx),
2918 DiagnosticSummary {
2919 error_count: 1,
2920 warning_count: 0,
2921 }
2922 );
2923 });
2924
2925 project.update(cx, |project, cx| {
2926 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2927 });
2928
2929 // The diagnostics are cleared.
2930 cx.executor().run_until_parked();
2931 buffer.update(cx, |buffer, _| {
2932 assert_eq!(
2933 buffer
2934 .snapshot()
2935 .diagnostics_in_range::<_, usize>(0..1, false)
2936 .map(|entry| entry.diagnostic.message.clone())
2937 .collect::<Vec<_>>(),
2938 Vec::<String>::new(),
2939 );
2940 });
2941 project.update(cx, |project, cx| {
2942 assert_eq!(
2943 project.diagnostic_summary(false, cx),
2944 DiagnosticSummary {
2945 error_count: 0,
2946 warning_count: 0,
2947 }
2948 );
2949 });
2950}
2951
2952#[gpui::test]
2953async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2954 init_test(cx);
2955
2956 let fs = FakeFs::new(cx.executor());
2957 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2958
2959 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2960 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2961
2962 language_registry.add(rust_lang());
2963 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2964
2965 let (buffer, _handle) = project
2966 .update(cx, |project, cx| {
2967 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2968 })
2969 .await
2970 .unwrap();
2971
2972 // Before restarting the server, report diagnostics with an unknown buffer version.
2973 let fake_server = fake_servers.next().await.unwrap();
2974 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2975 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2976 version: Some(10000),
2977 diagnostics: Vec::new(),
2978 });
2979 cx.executor().run_until_parked();
2980 project.update(cx, |project, cx| {
2981 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2982 });
2983
2984 let mut fake_server = fake_servers.next().await.unwrap();
2985 let notification = fake_server
2986 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2987 .await
2988 .text_document;
2989 assert_eq!(notification.version, 0);
2990}
2991
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Cancelling language-server work for a buffer should send
    // WorkDoneProgressCancel only for progress the server marked as
    // cancellable, not for non-cancellable progress.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First: a non-cancellable piece of work, which must NOT be cancelled below.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second: cancellable work under the disk-based diagnostics token.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Only the cancellable token is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3063
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Toggling `enable_language_server` in the per-language settings should
    // stop only the affected language's server, and re-enabling should start
    // a fresh server instance that re-opens the relevant buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    // Each server is told about its own language's buffer.
    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The re-enabled Rust server is a brand-new instance that re-opens a.rs.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3181
3182#[gpui::test(iterations = 3)]
3183async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3184 init_test(cx);
3185
3186 let text = "
3187 fn a() { A }
3188 fn b() { BB }
3189 fn c() { CCC }
3190 "
3191 .unindent();
3192
3193 let fs = FakeFs::new(cx.executor());
3194 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3195
3196 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3197 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3198
3199 language_registry.add(rust_lang());
3200 let mut fake_servers = language_registry.register_fake_lsp(
3201 "Rust",
3202 FakeLspAdapter {
3203 disk_based_diagnostics_sources: vec!["disk".into()],
3204 ..Default::default()
3205 },
3206 );
3207
3208 let buffer = project
3209 .update(cx, |project, cx| {
3210 project.open_local_buffer(path!("/dir/a.rs"), cx)
3211 })
3212 .await
3213 .unwrap();
3214
3215 let _handle = project.update(cx, |project, cx| {
3216 project.register_buffer_with_language_servers(&buffer, cx)
3217 });
3218
3219 let mut fake_server = fake_servers.next().await.unwrap();
3220 let open_notification = fake_server
3221 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3222 .await;
3223
3224 // Edit the buffer, moving the content down
3225 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3226 let change_notification_1 = fake_server
3227 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3228 .await;
3229 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3230
3231 // Report some diagnostics for the initial version of the buffer
3232 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3233 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3234 version: Some(open_notification.text_document.version),
3235 diagnostics: vec![
3236 lsp::Diagnostic {
3237 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3238 severity: Some(DiagnosticSeverity::ERROR),
3239 message: "undefined variable 'A'".to_string(),
3240 source: Some("disk".to_string()),
3241 ..Default::default()
3242 },
3243 lsp::Diagnostic {
3244 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3245 severity: Some(DiagnosticSeverity::ERROR),
3246 message: "undefined variable 'BB'".to_string(),
3247 source: Some("disk".to_string()),
3248 ..Default::default()
3249 },
3250 lsp::Diagnostic {
3251 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3252 severity: Some(DiagnosticSeverity::ERROR),
3253 source: Some("disk".to_string()),
3254 message: "undefined variable 'CCC'".to_string(),
3255 ..Default::default()
3256 },
3257 ],
3258 });
3259
3260 // The diagnostics have moved down since they were created.
3261 cx.executor().run_until_parked();
3262 buffer.update(cx, |buffer, _| {
3263 assert_eq!(
3264 buffer
3265 .snapshot()
3266 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3267 .collect::<Vec<_>>(),
3268 &[
3269 DiagnosticEntry {
3270 range: Point::new(3, 9)..Point::new(3, 11),
3271 diagnostic: Diagnostic {
3272 source: Some("disk".into()),
3273 severity: DiagnosticSeverity::ERROR,
3274 message: "undefined variable 'BB'".to_string(),
3275 is_disk_based: true,
3276 group_id: 1,
3277 is_primary: true,
3278 source_kind: DiagnosticSourceKind::Pushed,
3279 ..Diagnostic::default()
3280 },
3281 },
3282 DiagnosticEntry {
3283 range: Point::new(4, 9)..Point::new(4, 12),
3284 diagnostic: Diagnostic {
3285 source: Some("disk".into()),
3286 severity: DiagnosticSeverity::ERROR,
3287 message: "undefined variable 'CCC'".to_string(),
3288 is_disk_based: true,
3289 group_id: 2,
3290 is_primary: true,
3291 source_kind: DiagnosticSourceKind::Pushed,
3292 ..Diagnostic::default()
3293 }
3294 }
3295 ]
3296 );
3297 assert_eq!(
3298 chunks_with_diagnostics(buffer, 0..buffer.len()),
3299 [
3300 ("\n\nfn a() { ".to_string(), None),
3301 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3302 (" }\nfn b() { ".to_string(), None),
3303 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3304 (" }\nfn c() { ".to_string(), None),
3305 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3306 (" }\n".to_string(), None),
3307 ]
3308 );
3309 assert_eq!(
3310 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3311 [
3312 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3313 (" }\nfn c() { ".to_string(), None),
3314 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3315 ]
3316 );
3317 });
3318
3319 // Ensure overlapping diagnostics are highlighted correctly.
3320 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3321 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3322 version: Some(open_notification.text_document.version),
3323 diagnostics: vec![
3324 lsp::Diagnostic {
3325 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3326 severity: Some(DiagnosticSeverity::ERROR),
3327 message: "undefined variable 'A'".to_string(),
3328 source: Some("disk".to_string()),
3329 ..Default::default()
3330 },
3331 lsp::Diagnostic {
3332 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3333 severity: Some(DiagnosticSeverity::WARNING),
3334 message: "unreachable statement".to_string(),
3335 source: Some("disk".to_string()),
3336 ..Default::default()
3337 },
3338 ],
3339 });
3340
3341 cx.executor().run_until_parked();
3342 buffer.update(cx, |buffer, _| {
3343 assert_eq!(
3344 buffer
3345 .snapshot()
3346 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3347 .collect::<Vec<_>>(),
3348 &[
3349 DiagnosticEntry {
3350 range: Point::new(2, 9)..Point::new(2, 12),
3351 diagnostic: Diagnostic {
3352 source: Some("disk".into()),
3353 severity: DiagnosticSeverity::WARNING,
3354 message: "unreachable statement".to_string(),
3355 is_disk_based: true,
3356 group_id: 4,
3357 is_primary: true,
3358 source_kind: DiagnosticSourceKind::Pushed,
3359 ..Diagnostic::default()
3360 }
3361 },
3362 DiagnosticEntry {
3363 range: Point::new(2, 9)..Point::new(2, 10),
3364 diagnostic: Diagnostic {
3365 source: Some("disk".into()),
3366 severity: DiagnosticSeverity::ERROR,
3367 message: "undefined variable 'A'".to_string(),
3368 is_disk_based: true,
3369 group_id: 3,
3370 is_primary: true,
3371 source_kind: DiagnosticSourceKind::Pushed,
3372 ..Diagnostic::default()
3373 },
3374 }
3375 ]
3376 );
3377 assert_eq!(
3378 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3379 [
3380 ("fn a() { ".to_string(), None),
3381 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3382 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3383 ("\n".to_string(), None),
3384 ]
3385 );
3386 assert_eq!(
3387 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3388 [
3389 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3390 ("\n".to_string(), None),
3391 ]
3392 );
3393 });
3394
3395 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3396 // changes since the last save.
3397 buffer.update(cx, |buffer, cx| {
3398 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3399 buffer.edit(
3400 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3401 None,
3402 cx,
3403 );
3404 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3405 });
3406 let change_notification_2 = fake_server
3407 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3408 .await;
3409 assert!(
3410 change_notification_2.text_document.version > change_notification_1.text_document.version
3411 );
3412
3413 // Handle out-of-order diagnostics
3414 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3415 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3416 version: Some(change_notification_2.text_document.version),
3417 diagnostics: vec![
3418 lsp::Diagnostic {
3419 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3420 severity: Some(DiagnosticSeverity::ERROR),
3421 message: "undefined variable 'BB'".to_string(),
3422 source: Some("disk".to_string()),
3423 ..Default::default()
3424 },
3425 lsp::Diagnostic {
3426 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3427 severity: Some(DiagnosticSeverity::WARNING),
3428 message: "undefined variable 'A'".to_string(),
3429 source: Some("disk".to_string()),
3430 ..Default::default()
3431 },
3432 ],
3433 });
3434
3435 cx.executor().run_until_parked();
3436 buffer.update(cx, |buffer, _| {
3437 assert_eq!(
3438 buffer
3439 .snapshot()
3440 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3441 .collect::<Vec<_>>(),
3442 &[
3443 DiagnosticEntry {
3444 range: Point::new(2, 21)..Point::new(2, 22),
3445 diagnostic: Diagnostic {
3446 source: Some("disk".into()),
3447 severity: DiagnosticSeverity::WARNING,
3448 message: "undefined variable 'A'".to_string(),
3449 is_disk_based: true,
3450 group_id: 6,
3451 is_primary: true,
3452 source_kind: DiagnosticSourceKind::Pushed,
3453 ..Diagnostic::default()
3454 }
3455 },
3456 DiagnosticEntry {
3457 range: Point::new(3, 9)..Point::new(3, 14),
3458 diagnostic: Diagnostic {
3459 source: Some("disk".into()),
3460 severity: DiagnosticSeverity::ERROR,
3461 message: "undefined variable 'BB'".to_string(),
3462 is_disk_based: true,
3463 group_id: 5,
3464 is_primary: true,
3465 source_kind: DiagnosticSourceKind::Pushed,
3466 ..Diagnostic::default()
3467 },
3468 }
3469 ]
3470 );
3471 });
3472}
3473
// Verifies how zero-width (empty) diagnostic ranges are displayed: when a
// diagnostic's start and end positions are equal, the highlighted span must be
// widened to cover at least one visible character so the user can see it.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Push two empty-range diagnostics directly into the LSP store:
    // one mid-line (line 0, col 10) and one at the very end of line 1.
    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from(path!("/dir/a.rs")),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
3551
// Verifies that diagnostics published by distinct language servers for the same
// file are kept separate (keyed by `LanguageServerId`) and are both counted in
// the project-wide diagnostic summary rather than one overwriting the other.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    lsp_store.update(cx, |lsp_store, cx| {
        // Server 0 reports an error over the same range as server 1 below.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // Server 1 reports its own error over the identical range.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both diagnostics must survive: 2 errors total, not 1.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
3612
// Verifies that `edits_from_lsp` correctly rebases edits that a language server
// computed against an OLDER document version: the buffer is edited after the
// server snapshots it, the server's edits arrive tagged with the old version
// number, and the translated edits must still land in the right places while
// preserving the user's intervening edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server saw at didOpen; the edits below will be
    // tagged with this (now-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The edit positions below are expressed in coordinates of the ORIGINAL
    // (pre-edit) buffer; passing `Some(lsp_document_version)` tells the store
    // to rebase them over the user's subsequent edits.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits must preserve the user's comments while
    // applying the server's renames (f1 -> f10, f2 -> f200, f3 -> f4000).
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3767
// Verifies that `edits_from_lsp` minimizes a sprawling whole-file rewrite down
// to the actual changes: a server sends a large replace-everything diff (as
// rust-analyzer does for merge-imports) and the translated edits must collapse
// to just the two spots that really changed.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The big diff must have been reduced to exactly two minimal edits:
        // rewriting the first import and removing the now-duplicated line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3878
// Verifies that `edits_from_lsp` tolerates spec-violating edit ordering: an
// insertion whose position precedes a replacement that was listed first (the
// LSP spec requires edits to be sorted and insertions to come before
// replacements at the same position). Both edits must still apply, yielding
// the import line followed by the original (replaced) call.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3934
// Verifies that `edits_from_lsp` sanitizes malformed server edits: unordered
// edits, an inverted range (start after end), and a range whose end points past
// the end of the document. The sanitized result must match the minimal edits
// produced for the well-formed equivalent (see the adjacent-lines test above).
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result collapses to two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4041
4042fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4043 buffer: &Buffer,
4044 range: Range<T>,
4045) -> Vec<(String, Option<DiagnosticSeverity>)> {
4046 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4047 for chunk in buffer.snapshot().chunks(range, true) {
4048 if chunks
4049 .last()
4050 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4051 {
4052 chunks.last_mut().unwrap().0.push_str(chunk.text);
4053 } else {
4054 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4055 }
4056 }
4057 chunks
4058}
4059
// Verifies go-to-definition across files: a definition response pointing at a
// file outside the project's visible worktree must open that file in a new
// invisible worktree, and dropping the definition handle must release that
// worktree again.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs sits outside the worktree.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server answers the definition request with a location in a.rs.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was opened in a new, invisible worktree alongside the visible b.rs.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Once the definition (and its buffer handle) is dropped, the invisible
    // worktree for a.rs is released.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Returns each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4160
// Verifies completion-item resolution precedence: when a completion item
// carries an explicit `text_edit`, its new text and range are used verbatim,
// taking precedence over both `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item supplies all three sources of text; only the text_edit should win.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    // The replace range covers the "fqn" suffix, exactly as the text_edit specified.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4244
// Verifies completion handling when items rely on the CompletionList's
// `itemDefaults.editRange` (LSP 3.17) instead of a per-item `text_edit`:
// the default range is applied per item, with `text_edit_text` preferred
// over `label` for the inserted text.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // text_edit_text is used for the new text; the default edit_range for the span.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit_range present, the label (not insert_text) supplies
        // the new text when both text_edit and text_edit_text are absent.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4382
// Verifies completion range inference when neither a per-item text_edit nor a
// default edit_range is provided: the replace range is derived from the word
// around the cursor, using insert_text when present and the label otherwise.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // insert_text is used, and the range is inferred as the "fqn" word at the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor is placed just inside the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is used, replacing the "cmp" word preceding the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4489
// Verifies that carriage returns in LSP completion text (`\r` and `\r\n`)
// are normalized to `\n` before the completion is surfaced to the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with insert_text containing both a bare `\r` and a `\r\n`.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both `\r` and `\r\n` must have been converted to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4558
// End-to-end test of command-backed code actions: the action resolves to a
// command (not edits), executing the command makes the server send the editor
// a `workspace/applyEdit` request, and the resulting edits are captured in
// the project transaction returned by `apply_code_action`.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Advertise both code-action resolution and a single executable command.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first (command-carrying) action.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-to-client request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction's edit is undoable like any local edit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
4702
// Verifies that renaming a file into a not-yet-existing nested directory
// creates the whole directory hierarchy, preserves file contents, and that a
// subsequent rename into an already-existing directory also succeeds.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move the file three directory levels deep; none of the directories
    // exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Look the file up again: the rename may have assigned it a new entry id.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Now move it up one level, into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
4810
4811#[gpui::test(iterations = 10)]
4812async fn test_save_file(cx: &mut gpui::TestAppContext) {
4813 init_test(cx);
4814
4815 let fs = FakeFs::new(cx.executor());
4816 fs.insert_tree(
4817 path!("/dir"),
4818 json!({
4819 "file1": "the old contents",
4820 }),
4821 )
4822 .await;
4823
4824 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4825 let buffer = project
4826 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4827 .await
4828 .unwrap();
4829 buffer.update(cx, |buffer, cx| {
4830 assert_eq!(buffer.text(), "the old contents");
4831 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4832 });
4833
4834 project
4835 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4836 .await
4837 .unwrap();
4838
4839 let new_text = fs
4840 .load(Path::new(path!("/dir/file1")))
4841 .await
4842 .unwrap()
4843 .replace("\r\n", "\n");
4844 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4845}
4846
// Regression test: saving an untitled buffer under a path whose language has
// a registered server must start that server and open the document in it.
// Issue: #24349
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; with no file there is no language to match,
    // so no server should be running for it yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving under a `.rs` path assigns the Rust language to the buffer.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer reports that a server is attached.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4926
4927#[gpui::test(iterations = 30)]
4928async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4929 init_test(cx);
4930
4931 let fs = FakeFs::new(cx.executor());
4932 fs.insert_tree(
4933 path!("/dir"),
4934 json!({
4935 "file1": "the original contents",
4936 }),
4937 )
4938 .await;
4939
4940 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4941 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4942 let buffer = project
4943 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4944 .await
4945 .unwrap();
4946
4947 // Change the buffer's file on disk, and then wait for the file change
4948 // to be detected by the worktree, so that the buffer starts reloading.
4949 fs.save(
4950 path!("/dir/file1").as_ref(),
4951 &"the first contents".into(),
4952 Default::default(),
4953 )
4954 .await
4955 .unwrap();
4956 worktree.next_event(cx).await;
4957
4958 // Change the buffer's file again. Depending on the random seed, the
4959 // previous file change may still be in progress.
4960 fs.save(
4961 path!("/dir/file1").as_ref(),
4962 &"the second contents".into(),
4963 Default::default(),
4964 )
4965 .await
4966 .unwrap();
4967 worktree.next_event(cx).await;
4968
4969 cx.executor().run_until_parked();
4970 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4971 buffer.read_with(cx, |buffer, _| {
4972 assert_eq!(buffer.text(), on_disk_text);
4973 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4974 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4975 });
4976}
4977
4978#[gpui::test(iterations = 30)]
4979async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4980 init_test(cx);
4981
4982 let fs = FakeFs::new(cx.executor());
4983 fs.insert_tree(
4984 path!("/dir"),
4985 json!({
4986 "file1": "the original contents",
4987 }),
4988 )
4989 .await;
4990
4991 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4992 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4993 let buffer = project
4994 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4995 .await
4996 .unwrap();
4997
4998 // Change the buffer's file on disk, and then wait for the file change
4999 // to be detected by the worktree, so that the buffer starts reloading.
5000 fs.save(
5001 path!("/dir/file1").as_ref(),
5002 &"the first contents".into(),
5003 Default::default(),
5004 )
5005 .await
5006 .unwrap();
5007 worktree.next_event(cx).await;
5008
5009 cx.executor()
5010 .spawn(cx.executor().simulate_random_delay())
5011 .await;
5012
5013 // Perform a noop edit, causing the buffer's version to increase.
5014 buffer.update(cx, |buffer, cx| {
5015 buffer.edit([(0..0, " ")], None, cx);
5016 buffer.undo(cx);
5017 });
5018
5019 cx.executor().run_until_parked();
5020 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5021 buffer.read_with(cx, |buffer, _| {
5022 let buffer_text = buffer.text();
5023 if buffer_text == on_disk_text {
5024 assert!(
5025 !buffer.is_dirty() && !buffer.has_conflict(),
5026 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5027 );
5028 }
5029 // If the file change occurred while the buffer was processing the first
5030 // change, the buffer will be in a conflicting state.
5031 else {
5032 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5033 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5034 }
5035 });
5036}
5037
5038#[gpui::test]
5039async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5040 init_test(cx);
5041
5042 let fs = FakeFs::new(cx.executor());
5043 fs.insert_tree(
5044 path!("/dir"),
5045 json!({
5046 "file1": "the old contents",
5047 }),
5048 )
5049 .await;
5050
5051 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5052 let buffer = project
5053 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5054 .await
5055 .unwrap();
5056 buffer.update(cx, |buffer, cx| {
5057 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5058 });
5059
5060 project
5061 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5062 .await
5063 .unwrap();
5064
5065 let new_text = fs
5066 .load(Path::new(path!("/dir/file1")))
5067 .await
5068 .unwrap()
5069 .replace("\r\n", "\n");
5070 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5071}
5072
// "Save as" on an untitled buffer: the buffer becomes associated with the new
// file, language detection runs against the new path, and opening the same
// path afterwards yields the very same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // No file yet, so the buffer falls back to plain text.
        assert_eq!(buffer.language().unwrap().name(), "Plain Text");
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    // Let language detection for the new `.rs` path settle.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust");
    });

    // Opening the saved path must dedupe to the same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
5126
// "Save as" on a buffer that already has a file: the buffer must switch its
// association to the new path, while the original file on disk stays intact
// and can still be opened with its original contents.
#[gpui::test]
async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // NOTE(review): the tree is inserted after `Project::test` here, unlike
    // most tests in this file — presumably relying on the worktree picking up
    // the change via fs events; confirm this ordering is intentional.
    fs.insert_tree(
        path!("/dir"),
        json!({
            "data_a.txt": "data about a"
        }),
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    // Make the buffer differ from disk ("data about a" -> "data about b").
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(11..12, "b")], None, cx);
    });

    // Save buffer's contents as a new file and confirm that the buffer's now
    // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
    // file associated with the buffer has now been updated to `data_b.txt`
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let new_path = ProjectPath {
                worktree_id,
                path: rel_path("data_b.txt").into(),
            };

            project.save_buffer_as(buffer.clone(), new_path, cx)
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_b.txt")
        )
    });

    // Open the original `data_a.txt` file, confirming that its contents are
    // unchanged and the resulting buffer's associated file is `data_a.txt`.
    let original_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    original_buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "data about a");
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_a.txt")
        )
    });
}
5193
// Exercises a real-filesystem rescan after renames/deletes, verifying that
// entry ids and open buffers track their files across moves, and that a
// remote replica of the worktree converges after replaying buffered updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a worktree-relative path.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Buffer every update the local worktree emits, for later replay into the
    // remote replica.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids are stable across renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths; the deleted
        // file's buffer keeps its last known path.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5361
// Regression test (Linux-only, real filesystem): after a directory is
// deleted and recreated at the same path, file events from inside the
// recreated directory must still reach the worktree.
#[cfg(target_os = "linux")]
#[gpui::test(retries = 5)]
async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({}));
    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    tree.flush_fs_events(cx).await;

    // Create the directory; the worktree should pick it up.
    let repro_dir = dir.path().join("repro");
    std::fs::create_dir(&repro_dir).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|cx| {
        assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
    });

    // Remove it; the entry should disappear.
    std::fs::remove_dir_all(&repro_dir).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|cx| {
        assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
    });

    // Recreate the directory at the same path.
    std::fs::create_dir(&repro_dir).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|cx| {
        assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
    });

    // A file created inside the recreated directory must still generate an
    // event that the worktree observes.
    std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|cx| {
        assert!(
            tree.read(cx)
                .entry_for_path(rel_path("repro/repro-marker"))
                .is_some()
        );
    });
}
5407
// Renaming a directory must preserve the entry ids of the directory and its
// children, and a buffer open on a contained file must stay clean.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the containing directory "a" -> "b".
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids survive the rename, and the open buffer is still clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
5461
5462#[gpui::test]
5463async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5464 init_test(cx);
5465
5466 let fs = FakeFs::new(cx.executor());
5467 fs.insert_tree(
5468 "/dir",
5469 json!({
5470 "a.txt": "a-contents",
5471 "b.txt": "b-contents",
5472 }),
5473 )
5474 .await;
5475
5476 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5477
5478 // Spawn multiple tasks to open paths, repeating some paths.
5479 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5480 (
5481 p.open_local_buffer("/dir/a.txt", cx),
5482 p.open_local_buffer("/dir/b.txt", cx),
5483 p.open_local_buffer("/dir/a.txt", cx),
5484 )
5485 });
5486
5487 let buffer_a_1 = buffer_a_1.await.unwrap();
5488 let buffer_a_2 = buffer_a_2.await.unwrap();
5489 let buffer_b = buffer_b.await.unwrap();
5490 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5491 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5492
5493 // There is only one buffer per path.
5494 let buffer_a_id = buffer_a_1.entity_id();
5495 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5496
5497 // Open the same path again while it is still open.
5498 drop(buffer_a_1);
5499 let buffer_a_3 = project
5500 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5501 .await
5502 .unwrap();
5503
5504 // There's still only one buffer per path.
5505 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5506}
5507
5508#[gpui::test]
5509async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5510 init_test(cx);
5511
5512 let fs = FakeFs::new(cx.executor());
5513 fs.insert_tree(
5514 path!("/dir"),
5515 json!({
5516 "file1": "abc",
5517 "file2": "def",
5518 "file3": "ghi",
5519 }),
5520 )
5521 .await;
5522
5523 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5524
5525 let buffer1 = project
5526 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5527 .await
5528 .unwrap();
5529 let events = Arc::new(Mutex::new(Vec::new()));
5530
5531 // initially, the buffer isn't dirty.
5532 buffer1.update(cx, |buffer, cx| {
5533 cx.subscribe(&buffer1, {
5534 let events = events.clone();
5535 move |_, _, event, _| match event {
5536 BufferEvent::Operation { .. } => {}
5537 _ => events.lock().push(event.clone()),
5538 }
5539 })
5540 .detach();
5541
5542 assert!(!buffer.is_dirty());
5543 assert!(events.lock().is_empty());
5544
5545 buffer.edit([(1..2, "")], None, cx);
5546 });
5547
5548 // after the first edit, the buffer is dirty, and emits a dirtied event.
5549 buffer1.update(cx, |buffer, cx| {
5550 assert!(buffer.text() == "ac");
5551 assert!(buffer.is_dirty());
5552 assert_eq!(
5553 *events.lock(),
5554 &[
5555 language::BufferEvent::Edited { is_local: true },
5556 language::BufferEvent::DirtyChanged
5557 ]
5558 );
5559 events.lock().clear();
5560 buffer.did_save(
5561 buffer.version(),
5562 buffer.file().unwrap().disk_state().mtime(),
5563 cx,
5564 );
5565 });
5566
5567 // after saving, the buffer is not dirty, and emits a saved event.
5568 buffer1.update(cx, |buffer, cx| {
5569 assert!(!buffer.is_dirty());
5570 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
5571 events.lock().clear();
5572
5573 buffer.edit([(1..1, "B")], None, cx);
5574 buffer.edit([(2..2, "D")], None, cx);
5575 });
5576
5577 // after editing again, the buffer is dirty, and emits another dirty event.
5578 buffer1.update(cx, |buffer, cx| {
5579 assert!(buffer.text() == "aBDc");
5580 assert!(buffer.is_dirty());
5581 assert_eq!(
5582 *events.lock(),
5583 &[
5584 language::BufferEvent::Edited { is_local: true },
5585 language::BufferEvent::DirtyChanged,
5586 language::BufferEvent::Edited { is_local: true },
5587 ],
5588 );
5589 events.lock().clear();
5590
5591 // After restoring the buffer to its previously-saved state,
5592 // the buffer is not considered dirty anymore.
5593 buffer.edit([(1..3, "")], None, cx);
5594 assert!(buffer.text() == "ac");
5595 assert!(!buffer.is_dirty());
5596 });
5597
5598 assert_eq!(
5599 *events.lock(),
5600 &[
5601 language::BufferEvent::Edited { is_local: true },
5602 language::BufferEvent::DirtyChanged
5603 ]
5604 );
5605
5606 // When a file is deleted, it is not considered dirty.
5607 let events = Arc::new(Mutex::new(Vec::new()));
5608 let buffer2 = project
5609 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5610 .await
5611 .unwrap();
5612 buffer2.update(cx, |_, cx| {
5613 cx.subscribe(&buffer2, {
5614 let events = events.clone();
5615 move |_, _, event, _| match event {
5616 BufferEvent::Operation { .. } => {}
5617 _ => events.lock().push(event.clone()),
5618 }
5619 })
5620 .detach();
5621 });
5622
5623 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
5624 .await
5625 .unwrap();
5626 cx.executor().run_until_parked();
5627 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5628 assert_eq!(
5629 mem::take(&mut *events.lock()),
5630 &[language::BufferEvent::FileHandleChanged]
5631 );
5632
5633 // Buffer becomes dirty when edited.
5634 buffer2.update(cx, |buffer, cx| {
5635 buffer.edit([(2..3, "")], None, cx);
5636 assert_eq!(buffer.is_dirty(), true);
5637 });
5638 assert_eq!(
5639 mem::take(&mut *events.lock()),
5640 &[
5641 language::BufferEvent::Edited { is_local: true },
5642 language::BufferEvent::DirtyChanged
5643 ]
5644 );
5645
5646 // Buffer becomes clean again when all of its content is removed, because
5647 // the file was deleted.
5648 buffer2.update(cx, |buffer, cx| {
5649 buffer.edit([(0..2, "")], None, cx);
5650 assert_eq!(buffer.is_empty(), true);
5651 assert_eq!(buffer.is_dirty(), false);
5652 });
5653 assert_eq!(
5654 *events.lock(),
5655 &[
5656 language::BufferEvent::Edited { is_local: true },
5657 language::BufferEvent::DirtyChanged
5658 ]
5659 );
5660
5661 // When a file is already dirty when deleted, we don't emit a Dirtied event.
5662 let events = Arc::new(Mutex::new(Vec::new()));
5663 let buffer3 = project
5664 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
5665 .await
5666 .unwrap();
5667 buffer3.update(cx, |_, cx| {
5668 cx.subscribe(&buffer3, {
5669 let events = events.clone();
5670 move |_, _, event, _| match event {
5671 BufferEvent::Operation { .. } => {}
5672 _ => events.lock().push(event.clone()),
5673 }
5674 })
5675 .detach();
5676 });
5677
5678 buffer3.update(cx, |buffer, cx| {
5679 buffer.edit([(0..0, "x")], None, cx);
5680 });
5681 events.lock().clear();
5682 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
5683 .await
5684 .unwrap();
5685 cx.executor().run_until_parked();
5686 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
5687 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
5688}
5689
// Regression test: when a dirty buffer's file changes on disk, no reload
// happens immediately; but once an undo returns the buffer to its saved
// state, the pending on-disk change must be picked up.
#[gpui::test]
async fn test_dirty_buffer_reloads_after_undo(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file.txt": "version 1",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx))
        .await
        .unwrap();

    // Sanity-check the starting state: contents match disk, buffer is clean.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.text(), "version 1");
        assert!(!buffer.is_dirty());
    });

    // User makes an edit, making the buffer dirty.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "user edit: ")], None, cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.is_dirty());
        assert_eq!(buffer.text(), "user edit: version 1");
    });

    // External tool writes new content while buffer is dirty.
    // file_updated() updates the File but suppresses ReloadNeeded.
    fs.save(
        path!("/dir/file.txt").as_ref(),
        &"version 2 from external tool".into(),
        Default::default(),
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    // The dirty buffer keeps the user's text and is flagged as conflicting
    // with disk rather than being reloaded.
    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.has_conflict());
        assert_eq!(buffer.text(), "user edit: version 1");
    });

    // User undoes their edit. Buffer becomes clean, but disk has different
    // content. did_edit() detects the dirty->clean transition and checks if
    // disk changed while dirty. Since mtime differs from saved_mtime, it
    // emits ReloadNeeded.
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });
    cx.executor().run_until_parked();

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.text(),
            "version 2 from external tool",
            "buffer should reload from disk after undo makes it clean"
        );
        assert!(!buffer.is_dirty());
    });
}
5758
5759#[gpui::test]
5760async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
5761 init_test(cx);
5762
5763 let (initial_contents, initial_offsets) =
5764 marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
5765 let fs = FakeFs::new(cx.executor());
5766 fs.insert_tree(
5767 path!("/dir"),
5768 json!({
5769 "the-file": initial_contents,
5770 }),
5771 )
5772 .await;
5773 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5774 let buffer = project
5775 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
5776 .await
5777 .unwrap();
5778
5779 let anchors = initial_offsets
5780 .iter()
5781 .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
5782 .collect::<Vec<_>>();
5783
5784 // Change the file on disk, adding two new lines of text, and removing
5785 // one line.
5786 buffer.update(cx, |buffer, _| {
5787 assert!(!buffer.is_dirty());
5788 assert!(!buffer.has_conflict());
5789 });
5790
5791 let (new_contents, new_offsets) =
5792 marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
5793 fs.save(
5794 path!("/dir/the-file").as_ref(),
5795 &new_contents.as_str().into(),
5796 LineEnding::Unix,
5797 )
5798 .await
5799 .unwrap();
5800
5801 // Because the buffer was not modified, it is reloaded from disk. Its
5802 // contents are edited according to the diff between the old and new
5803 // file contents.
5804 cx.executor().run_until_parked();
5805 buffer.update(cx, |buffer, _| {
5806 assert_eq!(buffer.text(), new_contents);
5807 assert!(!buffer.is_dirty());
5808 assert!(!buffer.has_conflict());
5809
5810 let anchor_offsets = anchors
5811 .iter()
5812 .map(|anchor| anchor.to_offset(&*buffer))
5813 .collect::<Vec<_>>();
5814 assert_eq!(anchor_offsets, new_offsets);
5815 });
5816
5817 // Modify the buffer
5818 buffer.update(cx, |buffer, cx| {
5819 buffer.edit([(0..0, " ")], None, cx);
5820 assert!(buffer.is_dirty());
5821 assert!(!buffer.has_conflict());
5822 });
5823
5824 // Change the file on disk again, adding blank lines to the beginning.
5825 fs.save(
5826 path!("/dir/the-file").as_ref(),
5827 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
5828 LineEnding::Unix,
5829 )
5830 .await
5831 .unwrap();
5832
5833 // Because the buffer is modified, it doesn't reload from disk, but is
5834 // marked as having a conflict.
5835 cx.executor().run_until_parked();
5836 buffer.update(cx, |buffer, _| {
5837 assert_eq!(buffer.text(), " ".to_string() + &new_contents);
5838 assert!(buffer.has_conflict());
5839 });
5840}
5841
5842#[gpui::test]
5843async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
5844 init_test(cx);
5845
5846 let fs = FakeFs::new(cx.executor());
5847 fs.insert_tree(
5848 path!("/dir"),
5849 json!({
5850 "file1": "a\nb\nc\n",
5851 "file2": "one\r\ntwo\r\nthree\r\n",
5852 }),
5853 )
5854 .await;
5855
5856 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5857 let buffer1 = project
5858 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5859 .await
5860 .unwrap();
5861 let buffer2 = project
5862 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5863 .await
5864 .unwrap();
5865
5866 buffer1.update(cx, |buffer, _| {
5867 assert_eq!(buffer.text(), "a\nb\nc\n");
5868 assert_eq!(buffer.line_ending(), LineEnding::Unix);
5869 });
5870 buffer2.update(cx, |buffer, _| {
5871 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
5872 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5873 });
5874
5875 // Change a file's line endings on disk from unix to windows. The buffer's
5876 // state updates correctly.
5877 fs.save(
5878 path!("/dir/file1").as_ref(),
5879 &"aaa\nb\nc\n".into(),
5880 LineEnding::Windows,
5881 )
5882 .await
5883 .unwrap();
5884 cx.executor().run_until_parked();
5885 buffer1.update(cx, |buffer, _| {
5886 assert_eq!(buffer.text(), "aaa\nb\nc\n");
5887 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5888 });
5889
5890 // Save a file with windows line endings. The file is written correctly.
5891 buffer2.update(cx, |buffer, cx| {
5892 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
5893 });
5894 project
5895 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
5896 .await
5897 .unwrap();
5898 assert_eq!(
5899 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
5900 "one\r\ntwo\r\nthree\r\nfour\r\n",
5901 );
5902}
5903
// Diagnostics published with `relatedInformation` must be grouped: each hint
// that points back at a primary diagnostic joins that diagnostic's group,
// and `diagnostic_group` returns all members of a group in range order.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload in the style of rust-analyzer:
    // two primary diagnostics ("error 1" and "error 2") plus hint-severity
    // entries, cross-linked to their primaries via related_information.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary "error 1" with one hint attached.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // The hint for "error 1", linking back to the original diagnostic.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary "error 2" with two hints attached.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // The two hints for "error 2", each linking back to its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the payload into the store as pushed diagnostics from server 0.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries come back ordered by range; "error 2" and its hints share
    // group 0, "error 1" and its hint share group 1, and only the original
    // diagnostics are marked primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 contains "error 2" plus both of its hints, in range order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 contains "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6163
// Renaming a worktree entry must drive the LSP file-operation protocol:
// a `workspace/willRenameFiles` request (whose returned workspace edit is
// applied) followed by a `workspace/didRenameFiles` notification, both
// carrying the old and new URIs.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Register interest in renames of *.rs files and of any folder, so the
    // server is entitled to both will/did rename callbacks.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename; the resulting future is awaited only after the
    // willRenameFiles handler below has been installed and has responded.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will answer willRenameFiles with; the project is
    // expected to resolve (apply) it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the server handed back, so we can assert below that
    // the request was actually made.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles
    // with the same old/new URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6300
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end symbol rename via LSP: prepare_rename determines the
    // editable range, then perform_rename applies the server's workspace
    // edit across multiple buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Advertise rename support with prepare_provider so prepare_rename goes
    // through the server instead of any local fallback.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Ask for a rename at offset 7 (inside "ONE"); the fake server reports
    // the renameable range as columns 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server answers with edits touching both
    // one.rs (the definition) and two.rs (the two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The returned transaction maps each affected buffer to its undo entry;
    // both files must reflect the applied edits.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6441
6442#[gpui::test]
6443async fn test_search(cx: &mut gpui::TestAppContext) {
6444 init_test(cx);
6445
6446 let fs = FakeFs::new(cx.executor());
6447 fs.insert_tree(
6448 path!("/dir"),
6449 json!({
6450 "one.rs": "const ONE: usize = 1;",
6451 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6452 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6453 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6454 }),
6455 )
6456 .await;
6457 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6458 assert_eq!(
6459 search(
6460 &project,
6461 SearchQuery::text(
6462 "TWO",
6463 false,
6464 true,
6465 false,
6466 Default::default(),
6467 Default::default(),
6468 false,
6469 None
6470 )
6471 .unwrap(),
6472 cx
6473 )
6474 .await
6475 .unwrap(),
6476 HashMap::from_iter([
6477 (path!("dir/two.rs").to_string(), vec![6..9]),
6478 (path!("dir/three.rs").to_string(), vec![37..40])
6479 ])
6480 );
6481
6482 let buffer_4 = project
6483 .update(cx, |project, cx| {
6484 project.open_local_buffer(path!("/dir/four.rs"), cx)
6485 })
6486 .await
6487 .unwrap();
6488 buffer_4.update(cx, |buffer, cx| {
6489 let text = "two::TWO";
6490 buffer.edit([(20..28, text), (31..43, text)], None, cx);
6491 });
6492
6493 assert_eq!(
6494 search(
6495 &project,
6496 SearchQuery::text(
6497 "TWO",
6498 false,
6499 true,
6500 false,
6501 Default::default(),
6502 Default::default(),
6503 false,
6504 None,
6505 )
6506 .unwrap(),
6507 cx
6508 )
6509 .await
6510 .unwrap(),
6511 HashMap::from_iter([
6512 (path!("dir/two.rs").to_string(), vec![6..9]),
6513 (path!("dir/three.rs").to_string(), vec![37..40]),
6514 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
6515 ])
6516 );
6517}
6518
// Inclusion path matchers restrict search results to matching files;
// non-matching inclusion globs are simply inert rather than errors.
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The word "file" appears once in every fixture file.
    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion glob that matches nothing -> empty result set.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Single matching inclusion glob restricts results to those files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A mix of matching and non-matching globs behaves like the matching
    // glob alone.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching globs union their results.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
6642
// Verifies that the exclusion `PathMatcher` passed to `SearchQuery::text`
// filters search results: non-matching exclusions keep every file, a matching
// exclusion removes its files, and excluding every extension yields nothing.
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    // Two Rust and two TypeScript files; every file contains the query word.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Exclusion pattern matches no file -> all four files are returned.
    // NOTE(review): `SearchQuery::text` args appear to be
    // (query, _, case_sensitive, include_ignored, include, exclude, _, _) —
    // confirm against its signature.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Excluding "*.rs" leaves only the TypeScript files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // An extra pattern matching nothing ("*.odd") does not affect the result.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding both extensions leaves no results.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
6766
// Same exclusion scenarios as `test_search_with_exclusions`, but with an
// untitled in-memory buffer (whose text also contains the query) open in the
// project while searching.
#[gpui::test]
async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // An untitled buffer containing the query. NOTE(review): it never appears
    // in the expected result maps below — presumably path-less buffers are
    // filtered out of (or keyed differently in) search results; confirm
    // against the `search` helper.
    let _buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("file", None, false, cx)
    });

    // No exclusions match -> all on-disk files returned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Excluding "*.rs" leaves only TypeScript files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // Non-matching extra pattern does not change the outcome.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding every extension -> empty result set.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
6894
6895#[gpui::test]
6896async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6897 init_test(cx);
6898
6899 let search_query = "file";
6900
6901 let fs = FakeFs::new(cx.executor());
6902 fs.insert_tree(
6903 path!("/dir"),
6904 json!({
6905 "one.rs": r#"// Rust file one"#,
6906 "one.ts": r#"// TypeScript file one"#,
6907 "two.rs": r#"// Rust file two"#,
6908 "two.ts": r#"// TypeScript file two"#,
6909 }),
6910 )
6911 .await;
6912 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6913 assert!(
6914 search(
6915 &project,
6916 SearchQuery::text(
6917 search_query,
6918 false,
6919 true,
6920 false,
6921 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6922 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6923 false,
6924 None,
6925 )
6926 .unwrap(),
6927 cx
6928 )
6929 .await
6930 .unwrap()
6931 .is_empty(),
6932 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6933 );
6934
6935 assert!(
6936 search(
6937 &project,
6938 SearchQuery::text(
6939 search_query,
6940 false,
6941 true,
6942 false,
6943 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6944 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6945 false,
6946 None,
6947 )
6948 .unwrap(),
6949 cx
6950 )
6951 .await
6952 .unwrap()
6953 .is_empty(),
6954 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6955 );
6956
6957 assert!(
6958 search(
6959 &project,
6960 SearchQuery::text(
6961 search_query,
6962 false,
6963 true,
6964 false,
6965 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6966 .unwrap(),
6967 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6968 .unwrap(),
6969 false,
6970 None,
6971 )
6972 .unwrap(),
6973 cx
6974 )
6975 .await
6976 .unwrap()
6977 .is_empty(),
6978 "Non-matching inclusions and exclusions should not change that."
6979 );
6980
6981 assert_eq!(
6982 search(
6983 &project,
6984 SearchQuery::text(
6985 search_query,
6986 false,
6987 true,
6988 false,
6989 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6990 .unwrap(),
6991 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6992 .unwrap(),
6993 false,
6994 None,
6995 )
6996 .unwrap(),
6997 cx
6998 )
6999 .await
7000 .unwrap(),
7001 HashMap::from_iter([
7002 (path!("dir/one.ts").to_string(), vec![14..18]),
7003 (path!("dir/two.ts").to_string(), vec![14..18]),
7004 ]),
7005 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
7006 );
7007}
7008
// Verifies inclusion patterns across a project with two worktrees: a pattern
// qualified with a worktree name restricts results to that worktree, while an
// unqualified extension pattern matches files in every worktree.
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Identical file layouts in both worktrees so only the inclusion pattern
    // determines which results come back.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // Worktree-qualified inclusion: only worktree-a's Rust file matches.
    // NOTE(review): the `true` after the exclusion matcher appears to make
    // inclusion patterns match against worktree-qualified paths (it is `false`
    // for the unqualified "*.ts" query below) — confirm against the
    // `SearchQuery::text` signature.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same query against the other worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // Unqualified extension pattern matches in both worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
7107
// Verifies the "include ignored" search flag: by default gitignored
// directories (`target`, `node_modules`) are skipped; with the flag set they
// are searched too, and inclusion/exclusion matchers still apply inside them.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A repo whose .gitignore hides `target` and `node_modules`; the query
    // "key" appears in ignored and non-ignored files alike.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Default search (4th arg `false` = don't search ignored files): only the
    // top-level package.json is found.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project for each query so earlier scans don't affect state.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // Same query with ignored files included (4th arg `true`): every file
    // containing the query is found, including those under ignored dirs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion restricted to the ignored prettier dir, with TS files
    // excluded: only prettier's package.json survives both filters.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
7232
// Verifies text search on non-ASCII (Cyrillic) content: case-sensitive
// queries stay plain text searches, case-insensitive non-ASCII queries are
// compiled to regex queries, and match offsets are byte ranges.
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive (3rd arg `true`): matches only lowercase "привет".
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // A case-sensitive unicode query remains a plain text query.
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    // Ranges are byte offsets — each Cyrillic letter is 2 bytes in UTF-8.
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive (3rd arg `false`): matches both cases.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // Case-insensitive non-ASCII text queries get compiled to a regex query.
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Query containing "." is still a literal search, not a regex wildcard:
    // only the file with an actual trailing dot matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7315
// Verifies `Project::create_entry`, including a filename ending in dots
// ("b.."), and that the new entry lands inside the worktree root on disk.
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // NOTE(review): `insert_tree` uses a bare "/one/two" while the assertions
    // below use `path!` — presumably FakeFs normalizes; confirm on Windows.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // Worktree rooted at /one/two/three; create "b.." relative to it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, rel_path("b..")), true, cx)
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();

    // The full filesystem listing now contains the new entry.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );
}
7358
7359#[gpui::test]
7360async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
7361 init_test(cx);
7362
7363 let fs = FakeFs::new(cx.executor());
7364 fs.insert_tree(
7365 path!("/dir"),
7366 json!({
7367 "a.tsx": "a",
7368 }),
7369 )
7370 .await;
7371
7372 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7373
7374 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7375 language_registry.add(tsx_lang());
7376 let language_server_names = [
7377 "TypeScriptServer",
7378 "TailwindServer",
7379 "ESLintServer",
7380 "NoHoverCapabilitiesServer",
7381 ];
7382 let mut language_servers = [
7383 language_registry.register_fake_lsp(
7384 "tsx",
7385 FakeLspAdapter {
7386 name: language_server_names[0],
7387 capabilities: lsp::ServerCapabilities {
7388 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7389 ..lsp::ServerCapabilities::default()
7390 },
7391 ..FakeLspAdapter::default()
7392 },
7393 ),
7394 language_registry.register_fake_lsp(
7395 "tsx",
7396 FakeLspAdapter {
7397 name: language_server_names[1],
7398 capabilities: lsp::ServerCapabilities {
7399 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7400 ..lsp::ServerCapabilities::default()
7401 },
7402 ..FakeLspAdapter::default()
7403 },
7404 ),
7405 language_registry.register_fake_lsp(
7406 "tsx",
7407 FakeLspAdapter {
7408 name: language_server_names[2],
7409 capabilities: lsp::ServerCapabilities {
7410 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7411 ..lsp::ServerCapabilities::default()
7412 },
7413 ..FakeLspAdapter::default()
7414 },
7415 ),
7416 language_registry.register_fake_lsp(
7417 "tsx",
7418 FakeLspAdapter {
7419 name: language_server_names[3],
7420 capabilities: lsp::ServerCapabilities {
7421 hover_provider: None,
7422 ..lsp::ServerCapabilities::default()
7423 },
7424 ..FakeLspAdapter::default()
7425 },
7426 ),
7427 ];
7428
7429 let (buffer, _handle) = project
7430 .update(cx, |p, cx| {
7431 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7432 })
7433 .await
7434 .unwrap();
7435 cx.executor().run_until_parked();
7436
7437 let mut servers_with_hover_requests = HashMap::default();
7438 for i in 0..language_server_names.len() {
7439 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
7440 panic!(
7441 "Failed to get language server #{i} with name {}",
7442 &language_server_names[i]
7443 )
7444 });
7445 let new_server_name = new_server.server.name();
7446 assert!(
7447 !servers_with_hover_requests.contains_key(&new_server_name),
7448 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7449 );
7450 match new_server_name.as_ref() {
7451 "TailwindServer" | "TypeScriptServer" => {
7452 servers_with_hover_requests.insert(
7453 new_server_name.clone(),
7454 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7455 move |_, _| {
7456 let name = new_server_name.clone();
7457 async move {
7458 Ok(Some(lsp::Hover {
7459 contents: lsp::HoverContents::Scalar(
7460 lsp::MarkedString::String(format!("{name} hover")),
7461 ),
7462 range: None,
7463 }))
7464 }
7465 },
7466 ),
7467 );
7468 }
7469 "ESLintServer" => {
7470 servers_with_hover_requests.insert(
7471 new_server_name,
7472 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7473 |_, _| async move { Ok(None) },
7474 ),
7475 );
7476 }
7477 "NoHoverCapabilitiesServer" => {
7478 let _never_handled = new_server
7479 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
7480 panic!(
7481 "Should not call for hovers server with no corresponding capabilities"
7482 )
7483 });
7484 }
7485 unexpected => panic!("Unexpected server name: {unexpected}"),
7486 }
7487 }
7488
7489 let hover_task = project.update(cx, |project, cx| {
7490 project.hover(&buffer, Point::new(0, 0), cx)
7491 });
7492 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
7493 |mut hover_request| async move {
7494 hover_request
7495 .next()
7496 .await
7497 .expect("All hover requests should have been triggered")
7498 },
7499 ))
7500 .await;
7501 assert_eq!(
7502 vec!["TailwindServer hover", "TypeScriptServer hover"],
7503 hover_task
7504 .await
7505 .into_iter()
7506 .flatten()
7507 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7508 .sorted()
7509 .collect::<Vec<_>>(),
7510 "Should receive hover responses from all related servers with hover capabilities"
7511 );
7512}
7513
// Verifies that hover content parts consisting only of whitespace (empty
// string, spaces, newlines) are filtered out, yielding no hover blocks.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server replies with three hover parts that are all effectively
    // empty: "", whitespace, and bare newlines.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Ensure the request actually reached the fake server before asserting.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
7587
// Verifies that `Project::code_actions` filters server responses by the
// requested action kinds: of the two actions the server returns, only the
// one matching the requested `SOURCE_ORGANIZE_IMPORTS` kind survives.
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Server offers two actions of different kinds.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only SOURCE_ORGANIZE_IMPORTS actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Ensure the request actually reached the fake server before asserting.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the matching-kind action remains.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
7666
7667#[gpui::test]
7668async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
7669 init_test(cx);
7670
7671 let fs = FakeFs::new(cx.executor());
7672 fs.insert_tree(
7673 path!("/dir"),
7674 json!({
7675 "a.tsx": "a",
7676 }),
7677 )
7678 .await;
7679
7680 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7681
7682 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7683 language_registry.add(tsx_lang());
7684 let language_server_names = [
7685 "TypeScriptServer",
7686 "TailwindServer",
7687 "ESLintServer",
7688 "NoActionsCapabilitiesServer",
7689 ];
7690
7691 let mut language_server_rxs = [
7692 language_registry.register_fake_lsp(
7693 "tsx",
7694 FakeLspAdapter {
7695 name: language_server_names[0],
7696 capabilities: lsp::ServerCapabilities {
7697 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7698 ..lsp::ServerCapabilities::default()
7699 },
7700 ..FakeLspAdapter::default()
7701 },
7702 ),
7703 language_registry.register_fake_lsp(
7704 "tsx",
7705 FakeLspAdapter {
7706 name: language_server_names[1],
7707 capabilities: lsp::ServerCapabilities {
7708 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7709 ..lsp::ServerCapabilities::default()
7710 },
7711 ..FakeLspAdapter::default()
7712 },
7713 ),
7714 language_registry.register_fake_lsp(
7715 "tsx",
7716 FakeLspAdapter {
7717 name: language_server_names[2],
7718 capabilities: lsp::ServerCapabilities {
7719 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7720 ..lsp::ServerCapabilities::default()
7721 },
7722 ..FakeLspAdapter::default()
7723 },
7724 ),
7725 language_registry.register_fake_lsp(
7726 "tsx",
7727 FakeLspAdapter {
7728 name: language_server_names[3],
7729 capabilities: lsp::ServerCapabilities {
7730 code_action_provider: None,
7731 ..lsp::ServerCapabilities::default()
7732 },
7733 ..FakeLspAdapter::default()
7734 },
7735 ),
7736 ];
7737
7738 let (buffer, _handle) = project
7739 .update(cx, |p, cx| {
7740 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7741 })
7742 .await
7743 .unwrap();
7744 cx.executor().run_until_parked();
7745
7746 let mut servers_with_actions_requests = HashMap::default();
7747 for i in 0..language_server_names.len() {
7748 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
7749 panic!(
7750 "Failed to get language server #{i} with name {}",
7751 &language_server_names[i]
7752 )
7753 });
7754 let new_server_name = new_server.server.name();
7755
7756 assert!(
7757 !servers_with_actions_requests.contains_key(&new_server_name),
7758 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7759 );
7760 match new_server_name.0.as_ref() {
7761 "TailwindServer" | "TypeScriptServer" => {
7762 servers_with_actions_requests.insert(
7763 new_server_name.clone(),
7764 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7765 move |_, _| {
7766 let name = new_server_name.clone();
7767 async move {
7768 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
7769 lsp::CodeAction {
7770 title: format!("{name} code action"),
7771 ..lsp::CodeAction::default()
7772 },
7773 )]))
7774 }
7775 },
7776 ),
7777 );
7778 }
7779 "ESLintServer" => {
7780 servers_with_actions_requests.insert(
7781 new_server_name,
7782 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7783 |_, _| async move { Ok(None) },
7784 ),
7785 );
7786 }
7787 "NoActionsCapabilitiesServer" => {
7788 let _never_handled = new_server
7789 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7790 panic!(
7791 "Should not call for code actions server with no corresponding capabilities"
7792 )
7793 });
7794 }
7795 unexpected => panic!("Unexpected server name: {unexpected}"),
7796 }
7797 }
7798
7799 let code_actions_task = project.update(cx, |project, cx| {
7800 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
7801 });
7802
7803 // cx.run_until_parked();
7804 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
7805 |mut code_actions_request| async move {
7806 code_actions_request
7807 .next()
7808 .await
7809 .expect("All code actions requests should have been triggered")
7810 },
7811 ))
7812 .await;
7813 assert_eq!(
7814 vec!["TailwindServer code action", "TypeScriptServer code action"],
7815 code_actions_task
7816 .await
7817 .unwrap()
7818 .unwrap()
7819 .into_iter()
7820 .map(|code_action| code_action.lsp_action.title().to_owned())
7821 .sorted()
7822 .collect::<Vec<_>>(),
7823 "Should receive code actions responses from all related servers with hover capabilities"
7824 );
7825}
7826
7827#[gpui::test]
7828async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
7829 init_test(cx);
7830
7831 let fs = FakeFs::new(cx.executor());
7832 fs.insert_tree(
7833 "/dir",
7834 json!({
7835 "a.rs": "let a = 1;",
7836 "b.rs": "let b = 2;",
7837 "c.rs": "let c = 2;",
7838 }),
7839 )
7840 .await;
7841
7842 let project = Project::test(
7843 fs,
7844 [
7845 "/dir/a.rs".as_ref(),
7846 "/dir/b.rs".as_ref(),
7847 "/dir/c.rs".as_ref(),
7848 ],
7849 cx,
7850 )
7851 .await;
7852
7853 // check the initial state and get the worktrees
7854 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7855 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7856 assert_eq!(worktrees.len(), 3);
7857
7858 let worktree_a = worktrees[0].read(cx);
7859 let worktree_b = worktrees[1].read(cx);
7860 let worktree_c = worktrees[2].read(cx);
7861
7862 // check they start in the right order
7863 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7864 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7865 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7866
7867 (
7868 worktrees[0].clone(),
7869 worktrees[1].clone(),
7870 worktrees[2].clone(),
7871 )
7872 });
7873
7874 // move first worktree to after the second
7875 // [a, b, c] -> [b, a, c]
7876 project
7877 .update(cx, |project, cx| {
7878 let first = worktree_a.read(cx);
7879 let second = worktree_b.read(cx);
7880 project.move_worktree(first.id(), second.id(), cx)
7881 })
7882 .expect("moving first after second");
7883
7884 // check the state after moving
7885 project.update(cx, |project, cx| {
7886 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7887 assert_eq!(worktrees.len(), 3);
7888
7889 let first = worktrees[0].read(cx);
7890 let second = worktrees[1].read(cx);
7891 let third = worktrees[2].read(cx);
7892
7893 // check they are now in the right order
7894 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7895 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7896 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7897 });
7898
7899 // move the second worktree to before the first
7900 // [b, a, c] -> [a, b, c]
7901 project
7902 .update(cx, |project, cx| {
7903 let second = worktree_a.read(cx);
7904 let first = worktree_b.read(cx);
7905 project.move_worktree(first.id(), second.id(), cx)
7906 })
7907 .expect("moving second before first");
7908
7909 // check the state after moving
7910 project.update(cx, |project, cx| {
7911 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7912 assert_eq!(worktrees.len(), 3);
7913
7914 let first = worktrees[0].read(cx);
7915 let second = worktrees[1].read(cx);
7916 let third = worktrees[2].read(cx);
7917
7918 // check they are now in the right order
7919 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7920 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7921 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7922 });
7923
7924 // move the second worktree to after the third
7925 // [a, b, c] -> [a, c, b]
7926 project
7927 .update(cx, |project, cx| {
7928 let second = worktree_b.read(cx);
7929 let third = worktree_c.read(cx);
7930 project.move_worktree(second.id(), third.id(), cx)
7931 })
7932 .expect("moving second after third");
7933
7934 // check the state after moving
7935 project.update(cx, |project, cx| {
7936 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7937 assert_eq!(worktrees.len(), 3);
7938
7939 let first = worktrees[0].read(cx);
7940 let second = worktrees[1].read(cx);
7941 let third = worktrees[2].read(cx);
7942
7943 // check they are now in the right order
7944 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7945 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7946 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7947 });
7948
7949 // move the third worktree to before the second
7950 // [a, c, b] -> [a, b, c]
7951 project
7952 .update(cx, |project, cx| {
7953 let third = worktree_c.read(cx);
7954 let second = worktree_b.read(cx);
7955 project.move_worktree(third.id(), second.id(), cx)
7956 })
7957 .expect("moving third before second");
7958
7959 // check the state after moving
7960 project.update(cx, |project, cx| {
7961 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7962 assert_eq!(worktrees.len(), 3);
7963
7964 let first = worktrees[0].read(cx);
7965 let second = worktrees[1].read(cx);
7966 let third = worktrees[2].read(cx);
7967
7968 // check they are now in the right order
7969 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7970 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7971 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7972 });
7973
7974 // move the first worktree to after the third
7975 // [a, b, c] -> [b, c, a]
7976 project
7977 .update(cx, |project, cx| {
7978 let first = worktree_a.read(cx);
7979 let third = worktree_c.read(cx);
7980 project.move_worktree(first.id(), third.id(), cx)
7981 })
7982 .expect("moving first after third");
7983
7984 // check the state after moving
7985 project.update(cx, |project, cx| {
7986 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7987 assert_eq!(worktrees.len(), 3);
7988
7989 let first = worktrees[0].read(cx);
7990 let second = worktrees[1].read(cx);
7991 let third = worktrees[2].read(cx);
7992
7993 // check they are now in the right order
7994 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7995 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7996 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7997 });
7998
7999 // move the third worktree to before the first
8000 // [b, c, a] -> [a, b, c]
8001 project
8002 .update(cx, |project, cx| {
8003 let third = worktree_a.read(cx);
8004 let first = worktree_b.read(cx);
8005 project.move_worktree(third.id(), first.id(), cx)
8006 })
8007 .expect("moving third before first");
8008
8009 // check the state after moving
8010 project.update(cx, |project, cx| {
8011 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8012 assert_eq!(worktrees.len(), 3);
8013
8014 let first = worktrees[0].read(cx);
8015 let second = worktrees[1].read(cx);
8016 let third = worktrees[2].read(cx);
8017
8018 // check they are now in the right order
8019 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8020 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8021 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8022 });
8023}
8024
// Verifies that an unstaged diff (working copy vs. git index) reports the
// right hunks, and that it updates when the index contents change.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The index holds the original program; the working copy adds a comment
    // line and changes the printed string.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the initial diff computation finish, then expect two hunks:
    // the added comment line and the modified println line.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Rewrite the index: the comment line is now staged and the function body
    // is empty, so only the println line should remain as an (added) hunk.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff
                .snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
8118
// Verifies that an uncommitted diff (working copy vs. HEAD) tracks changes to
// HEAD and the index — including files deleted from the working copy — and
// that the diff's base text inherits the buffer's registered language.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD has the original program, the index stages the new string, and the
    // working copy additionally adds a comment line.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the language registered for .rs files.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment line exists only in the working copy (still unstaged, so it
    // has a secondary hunk); the println change is also staged, so it has none.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as one deletion hunk; the deletion is not yet
    // staged, so a secondary hunk is present.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file by removing it from the index entirely.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk remains, but with no secondary (unstaged) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8302
8303#[gpui::test]
8304async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
8305 use DiffHunkSecondaryStatus::*;
8306 init_test(cx);
8307
8308 let committed_contents = r#"
8309 zero
8310 one
8311 two
8312 three
8313 four
8314 five
8315 "#
8316 .unindent();
8317 let file_contents = r#"
8318 one
8319 TWO
8320 three
8321 FOUR
8322 five
8323 "#
8324 .unindent();
8325
8326 let fs = FakeFs::new(cx.background_executor.clone());
8327 fs.insert_tree(
8328 "/dir",
8329 json!({
8330 ".git": {},
8331 "file.txt": file_contents.clone()
8332 }),
8333 )
8334 .await;
8335
8336 fs.set_head_and_index_for_repo(
8337 path!("/dir/.git").as_ref(),
8338 &[("file.txt", committed_contents.clone())],
8339 );
8340
8341 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8342
8343 let buffer = project
8344 .update(cx, |project, cx| {
8345 project.open_local_buffer("/dir/file.txt", cx)
8346 })
8347 .await
8348 .unwrap();
8349 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8350 let uncommitted_diff = project
8351 .update(cx, |project, cx| {
8352 project.open_uncommitted_diff(buffer.clone(), cx)
8353 })
8354 .await
8355 .unwrap();
8356 let mut diff_events = cx.events(&uncommitted_diff);
8357
8358 // The hunks are initially unstaged.
8359 uncommitted_diff.read_with(cx, |diff, cx| {
8360 assert_hunks(
8361 diff.snapshot(cx).hunks(&snapshot),
8362 &snapshot,
8363 &diff.base_text_string(cx).unwrap(),
8364 &[
8365 (
8366 0..0,
8367 "zero\n",
8368 "",
8369 DiffHunkStatus::deleted(HasSecondaryHunk),
8370 ),
8371 (
8372 1..2,
8373 "two\n",
8374 "TWO\n",
8375 DiffHunkStatus::modified(HasSecondaryHunk),
8376 ),
8377 (
8378 3..4,
8379 "four\n",
8380 "FOUR\n",
8381 DiffHunkStatus::modified(HasSecondaryHunk),
8382 ),
8383 ],
8384 );
8385 });
8386
8387 // Stage a hunk. It appears as optimistically staged.
8388 uncommitted_diff.update(cx, |diff, cx| {
8389 let range =
8390 snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
8391 let hunks = diff
8392 .snapshot(cx)
8393 .hunks_intersecting_range(range, &snapshot)
8394 .collect::<Vec<_>>();
8395 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
8396
8397 assert_hunks(
8398 diff.snapshot(cx).hunks(&snapshot),
8399 &snapshot,
8400 &diff.base_text_string(cx).unwrap(),
8401 &[
8402 (
8403 0..0,
8404 "zero\n",
8405 "",
8406 DiffHunkStatus::deleted(HasSecondaryHunk),
8407 ),
8408 (
8409 1..2,
8410 "two\n",
8411 "TWO\n",
8412 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
8413 ),
8414 (
8415 3..4,
8416 "four\n",
8417 "FOUR\n",
8418 DiffHunkStatus::modified(HasSecondaryHunk),
8419 ),
8420 ],
8421 );
8422 });
8423
8424 // The diff emits a change event for the range of the staged hunk.
8425 assert!(matches!(
8426 diff_events.next().await.unwrap(),
8427 BufferDiffEvent::HunksStagedOrUnstaged(_)
8428 ));
8429 let event = diff_events.next().await.unwrap();
8430 if let BufferDiffEvent::DiffChanged(DiffChanged {
8431 changed_range: Some(changed_range),
8432 base_text_changed_range: _,
8433 extended_range: _,
8434 }) = event
8435 {
8436 let changed_range = changed_range.to_point(&snapshot);
8437 assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
8438 } else {
8439 panic!("Unexpected event {event:?}");
8440 }
8441
8442 // When the write to the index completes, it appears as staged.
8443 cx.run_until_parked();
8444 uncommitted_diff.update(cx, |diff, cx| {
8445 assert_hunks(
8446 diff.snapshot(cx).hunks(&snapshot),
8447 &snapshot,
8448 &diff.base_text_string(cx).unwrap(),
8449 &[
8450 (
8451 0..0,
8452 "zero\n",
8453 "",
8454 DiffHunkStatus::deleted(HasSecondaryHunk),
8455 ),
8456 (
8457 1..2,
8458 "two\n",
8459 "TWO\n",
8460 DiffHunkStatus::modified(NoSecondaryHunk),
8461 ),
8462 (
8463 3..4,
8464 "four\n",
8465 "FOUR\n",
8466 DiffHunkStatus::modified(HasSecondaryHunk),
8467 ),
8468 ],
8469 );
8470 });
8471
8472 // The diff emits a change event for the changed index text.
8473 let event = diff_events.next().await.unwrap();
8474 if let BufferDiffEvent::DiffChanged(DiffChanged {
8475 changed_range: Some(changed_range),
8476 base_text_changed_range: _,
8477 extended_range: _,
8478 }) = event
8479 {
8480 let changed_range = changed_range.to_point(&snapshot);
8481 assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
8482 } else {
8483 panic!("Unexpected event {event:?}");
8484 }
8485
8486 // Simulate a problem writing to the git index.
8487 fs.set_error_message_for_index_write(
8488 "/dir/.git".as_ref(),
8489 Some("failed to write git index".into()),
8490 );
8491
8492 // Stage another hunk.
8493 uncommitted_diff.update(cx, |diff, cx| {
8494 let range =
8495 snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
8496 let hunks = diff
8497 .snapshot(cx)
8498 .hunks_intersecting_range(range, &snapshot)
8499 .collect::<Vec<_>>();
8500 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
8501
8502 assert_hunks(
8503 diff.snapshot(cx).hunks(&snapshot),
8504 &snapshot,
8505 &diff.base_text_string(cx).unwrap(),
8506 &[
8507 (
8508 0..0,
8509 "zero\n",
8510 "",
8511 DiffHunkStatus::deleted(HasSecondaryHunk),
8512 ),
8513 (
8514 1..2,
8515 "two\n",
8516 "TWO\n",
8517 DiffHunkStatus::modified(NoSecondaryHunk),
8518 ),
8519 (
8520 3..4,
8521 "four\n",
8522 "FOUR\n",
8523 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
8524 ),
8525 ],
8526 );
8527 });
8528 assert!(matches!(
8529 diff_events.next().await.unwrap(),
8530 BufferDiffEvent::HunksStagedOrUnstaged(_)
8531 ));
8532 let event = diff_events.next().await.unwrap();
8533 if let BufferDiffEvent::DiffChanged(DiffChanged {
8534 changed_range: Some(changed_range),
8535 base_text_changed_range: _,
8536 extended_range: _,
8537 }) = event
8538 {
8539 let changed_range = changed_range.to_point(&snapshot);
8540 assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
8541 } else {
8542 panic!("Unexpected event {event:?}");
8543 }
8544
8545 // When the write fails, the hunk returns to being unstaged.
8546 cx.run_until_parked();
8547 uncommitted_diff.update(cx, |diff, cx| {
8548 assert_hunks(
8549 diff.snapshot(cx).hunks(&snapshot),
8550 &snapshot,
8551 &diff.base_text_string(cx).unwrap(),
8552 &[
8553 (
8554 0..0,
8555 "zero\n",
8556 "",
8557 DiffHunkStatus::deleted(HasSecondaryHunk),
8558 ),
8559 (
8560 1..2,
8561 "two\n",
8562 "TWO\n",
8563 DiffHunkStatus::modified(NoSecondaryHunk),
8564 ),
8565 (
8566 3..4,
8567 "four\n",
8568 "FOUR\n",
8569 DiffHunkStatus::modified(HasSecondaryHunk),
8570 ),
8571 ],
8572 );
8573 });
8574
8575 let event = diff_events.next().await.unwrap();
8576 if let BufferDiffEvent::DiffChanged(DiffChanged {
8577 changed_range: Some(changed_range),
8578 base_text_changed_range: _,
8579 extended_range: _,
8580 }) = event
8581 {
8582 let changed_range = changed_range.to_point(&snapshot);
8583 assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
8584 } else {
8585 panic!("Unexpected event {event:?}");
8586 }
8587
8588 // Allow writing to the git index to succeed again.
8589 fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);
8590
8591 // Stage two hunks with separate operations.
8592 uncommitted_diff.update(cx, |diff, cx| {
8593 let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
8594 diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
8595 diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
8596 });
8597
8598 // Both staged hunks appear as pending.
8599 uncommitted_diff.update(cx, |diff, cx| {
8600 assert_hunks(
8601 diff.snapshot(cx).hunks(&snapshot),
8602 &snapshot,
8603 &diff.base_text_string(cx).unwrap(),
8604 &[
8605 (
8606 0..0,
8607 "zero\n",
8608 "",
8609 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
8610 ),
8611 (
8612 1..2,
8613 "two\n",
8614 "TWO\n",
8615 DiffHunkStatus::modified(NoSecondaryHunk),
8616 ),
8617 (
8618 3..4,
8619 "four\n",
8620 "FOUR\n",
8621 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
8622 ),
8623 ],
8624 );
8625 });
8626
8627 // Both staging operations take effect.
8628 cx.run_until_parked();
8629 uncommitted_diff.update(cx, |diff, cx| {
8630 assert_hunks(
8631 diff.snapshot(cx).hunks(&snapshot),
8632 &snapshot,
8633 &diff.base_text_string(cx).unwrap(),
8634 &[
8635 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
8636 (
8637 1..2,
8638 "two\n",
8639 "TWO\n",
8640 DiffHunkStatus::modified(NoSecondaryHunk),
8641 ),
8642 (
8643 3..4,
8644 "four\n",
8645 "FOUR\n",
8646 DiffHunkStatus::modified(NoSecondaryHunk),
8647 ),
8648 ],
8649 );
8650 });
8651}
8652
// Regression test (note the pinned seeds) for staging hunks while filesystem
// events are delayed: the optimistic "pending" hunk states must survive index
// writes whose FS notifications arrive late or out of order.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as `test_staging_hunks`: the working copy deletes "zero"
    // and modifies "two" and "four", producing three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. With events paused, it stays "removal pending".
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8846
// Randomized test: repeatedly stage/unstage random hunks with random delays
// between operations, mirroring the expected secondary status in a local
// model, then verify the diff's hunks match the model once everything settles.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every 5th line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the expected-state model for the rest of the test.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk and record the optimistic pending state we expect.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Randomly let some of the in-flight index writes make progress.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, pending states resolve to their final values.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8966
8967#[gpui::test]
8968async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
8969 init_test(cx);
8970
8971 let committed_contents = r#"
8972 fn main() {
8973 println!("hello from HEAD");
8974 }
8975 "#
8976 .unindent();
8977 let file_contents = r#"
8978 fn main() {
8979 println!("hello from the working copy");
8980 }
8981 "#
8982 .unindent();
8983
8984 let fs = FakeFs::new(cx.background_executor.clone());
8985 fs.insert_tree(
8986 "/dir",
8987 json!({
8988 ".git": {},
8989 "src": {
8990 "main.rs": file_contents,
8991 }
8992 }),
8993 )
8994 .await;
8995
8996 fs.set_head_for_repo(
8997 Path::new("/dir/.git"),
8998 &[("src/main.rs", committed_contents.clone())],
8999 "deadbeef",
9000 );
9001 fs.set_index_for_repo(
9002 Path::new("/dir/.git"),
9003 &[("src/main.rs", committed_contents.clone())],
9004 );
9005
9006 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
9007
9008 let buffer = project
9009 .update(cx, |project, cx| {
9010 project.open_local_buffer("/dir/src/main.rs", cx)
9011 })
9012 .await
9013 .unwrap();
9014 let uncommitted_diff = project
9015 .update(cx, |project, cx| {
9016 project.open_uncommitted_diff(buffer.clone(), cx)
9017 })
9018 .await
9019 .unwrap();
9020
9021 cx.run_until_parked();
9022 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
9023 let snapshot = buffer.read(cx).snapshot();
9024 assert_hunks(
9025 uncommitted_diff.snapshot(cx).hunks(&snapshot),
9026 &snapshot,
9027 &uncommitted_diff.base_text_string(cx).unwrap(),
9028 &[(
9029 1..2,
9030 " println!(\"hello from HEAD\");\n",
9031 " println!(\"hello from the working copy\");\n",
9032 DiffHunkStatus {
9033 kind: DiffHunkStatusKind::Modified,
9034 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
9035 },
9036 )],
9037 );
9038 });
9039}
9040
// TODO: Should we test this on Windows also?
// Verifies that staging a hunk does not clobber the executable bit of a file
// that was committed with mode 100755. Uses the real filesystem and the real
// `git` binary, checking the staged mode via `git diff --staged` and
// `git ls-files -s`.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Real git subprocesses block, so allow the executor to park.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Make `foo` executable before the initial commit so HEAD records 100755,
    // then modify the file so there is a hunk to stage.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk of the modified file.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // A mode change would show up as "new mode 100644" in the staged diff.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check the index entry itself still carries the executable mode.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
9125
/// Verifies that `GitStore::repository_and_path_for_project_path` maps a
/// project path to the innermost repository containing it (including a nested
/// repository under `deps/dep1`), returns `None` for paths outside any
/// repository, and stops resolving once a repository's `.git` dir is removed.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // Two repositories: an outer one at `dir1` and a nested one at
    // `dir1/deps/dep1`. `c.txt` lives outside both.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    // Wait for git scanning to finish before querying repository state.
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project-relative path, expected (repo work dir, repo-relative path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // After the outer `.git` is deleted, paths under `dir1` (outside the
    // nested repo) should no longer resolve to any repository.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
9215
/// Verifies that a `.git` directory at the user's home directory is ignored
/// when the worktree root is a subfolder of home, but is picked up when the
/// home directory itself is opened as the worktree root.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Open only the `project` subfolder: the repository rooted at home must
    // not be associated with files in this worktree.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Open home itself: now the same file resolves to the repository whose
    // work directory is the home directory.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
9273
/// End-to-end check of `Repository::cached_status()` against a real git
/// repository on disk: initial modified/added/unchanged/deleted states, a
/// subsequent working-copy edit, a commit of all staged changes, and deletion
/// of both a tracked and an untracked file.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem and git binary, so blocking is expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce the worktree-level Deleted and Modified states.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; it should now appear as
    // worktree-modified alongside the existing entries.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Stage and commit everything, then delete one tracked and one untracked
    // file from the working copy.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
                diff_stat: Some(DiffStat {
                    added: 0,
                    deleted: 1,
                }),
            }]
        );
    });
}
9429
/// Checks status post-processing: a nested repository (`sub`) is excluded from
/// the parent repository's statuses, and a file deleted from the index while
/// still present on disk surfaces as a combined index-Deleted /
/// worktree-Added status. Currently `#[ignore]`d.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem and git, so blocking is expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer repository (not the nested one in `sub`).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
                diff_stat: None,
            }]
        )
    });
}
9494
9495#[track_caller]
9496/// We merge lhs into rhs.
9497fn merge_pending_ops_snapshots(
9498 source: Vec<pending_op::PendingOps>,
9499 mut target: Vec<pending_op::PendingOps>,
9500) -> Vec<pending_op::PendingOps> {
9501 for s_ops in source {
9502 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9503 if ops.repo_path == s_ops.repo_path {
9504 Some(idx)
9505 } else {
9506 None
9507 }
9508 }) {
9509 let t_ops = &mut target[idx];
9510 for s_op in s_ops.ops {
9511 if let Some(op_idx) = t_ops
9512 .ops
9513 .iter()
9514 .zip(0..)
9515 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9516 {
9517 let t_op = &mut t_ops.ops[op_idx];
9518 match (s_op.job_status, t_op.job_status) {
9519 (pending_op::JobStatus::Running, _) => {}
9520 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9521 (s_st, t_st) if s_st == t_st => {}
9522 _ => unreachable!(),
9523 }
9524 } else {
9525 t_ops.ops.push(s_op);
9526 }
9527 }
9528 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9529 } else {
9530 target.push(s_ops);
9531 }
9532 }
9533 target
9534}
9535
/// Exercises pending-op bookkeeping for repeated stage/unstage of a single
/// untracked file: each operation appears as `Running` immediately after being
/// issued, transitions to `Finished` once awaited, ids increase monotonically,
/// and the accumulated `PendingOpsChanged` events record the full history.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into a single merged tree so
    // the full op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Expected id of the next pending op; ids are assigned sequentially.
    let mut id = 1u16;

    // Issue a stage or unstage for `path`, asserting the op is `Running`
    // right after being issued and `Finished` after its task completes.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate staging and unstaging, ending in a staged state.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event history should show all five ops, each Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The final cached status reflects the last operation: staged (Added).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
9700
/// Verifies that when two staging operations for the same path are issued
/// back-to-back, the first (detached) op is recorded as `Skipped` and only the
/// second one runs to `Finished`.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into one merged tree for the
    // final assertion.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage request: detached, superseded by the one below.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second stage request for the same path: awaited (with a timeout so a
    // hang fails the test rather than stalling it).
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was skipped in favor of op 2, which finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file ends up staged (Added in the index).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
9810
/// Exercises pending-op tracking for bulk operations: after staging one file
/// individually, `stage_all` and `unstage_all` record per-path ops for both
/// untracked files, and the final cached status shows both files untracked
/// again.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into one merged tree for the
    // final assertions.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt individually, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: its individual stage (op 1) plus the unstage_all (op 2); the
    // stage_all did not need to re-stage it.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: staged by stage_all (op 1), unstaged by unstage_all (op 2).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
            ]
        );
    });
}
9941
/// Verifies that opening a subfolder of a repository as the worktree root
/// still discovers the repository at its true root, reports statuses for
/// paths inside the subfolder, and clears them when the repo status changes.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the files inside the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open only the deeply-nested subfolder as the worktree root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clear all statuses in the fake repo; the cached entries should follow.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
10021
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Compiled out entirely via `#[cfg(any())]` until the flakiness is resolved.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem and git, so blocking is expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a conflicting change on a branch, then cherry-pick it onto main.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is mid-cherry-pick with a conflict.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository should report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick is resolved, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
10104
/// Verifies that rewriting `.gitignore` updates which entries are treated as
/// ignored, and that a newly non-ignored file that gets staged shows up with
/// an index-Added status.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Now a.xml is ignored, and b.txt is staged (Added) and not ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
10172
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program already has open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// Verifies that renaming a repository's work directory on disk updates
/// `work_directory_abs_path` while preserving the cached per-path statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem and git, so blocking is expected.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should follow the rename, statuses intact.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10254
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program already has open. This is a
// limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    // End-to-end check against a real on-disk git repository: file statuses
    // observed by the worktree must stay correct across modifications,
    // commits, resets/stashes, ignore-rule changes, and directory renames.
    init_test(cx);
    // Real FS and real git below, so background threads must be allowed to block.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    // Only a.txt, c/d/e.txt, and .gitignore get committed; b.txt and f.txt
    // stay untracked.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt exist on disk but were never added to the index.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // Once committed and clean, files report no status entry at all.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // a.txt reports no status here — presumably its change was carried
        // away by the stash above; confirm against the git_stash helper.
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules to also cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create an untracked file inside a nested directory...
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // ...then rename its topmost parent, and verify that the status is
    // reported under the file's new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10479
#[gpui::test]
// NOTE(review): ignored with no stated reason — consider documenting why.
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    // Verifies that FS churn inside a git-ignored directory (like `target/`
    // during a build) produces no repository update events and only minimal
    // project entry events, while leaving the worktree snapshot unchanged.
    init_test(cx);
    // Real FS and real git below, so background threads must be allowed to block.
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record every repository update and worktree entry change for the
    // assertions below.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel entry is test plumbing, not a real change.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Open a file inside the ignored directory so that it (and its ancestors)
    // get loaded into the worktree.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![RepositoryEvent::StatusesChanged,],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Emulate build-tool churn inside the ignored directory: create a nested
    // dir, drop a temp file into it, then remove the whole dir again.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The snapshot should look exactly as it did before the churn.
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
10638
10639// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
10640// to different timings/ordering of events.
10641#[ignore]
10642#[gpui::test]
10643async fn test_odd_events_for_ignored_dirs(
10644 executor: BackgroundExecutor,
10645 cx: &mut gpui::TestAppContext,
10646) {
10647 init_test(cx);
10648 let fs = FakeFs::new(executor);
10649 fs.insert_tree(
10650 path!("/root"),
10651 json!({
10652 ".git": {},
10653 ".gitignore": "**/target/",
10654 "src": {
10655 "main.rs": "fn main() {}",
10656 },
10657 "target": {
10658 "debug": {
10659 "foo.txt": "foo",
10660 "deps": {}
10661 }
10662 }
10663 }),
10664 )
10665 .await;
10666 fs.set_head_and_index_for_repo(
10667 path!("/root/.git").as_ref(),
10668 &[
10669 (".gitignore", "**/target/".into()),
10670 ("src/main.rs", "fn main() {}".into()),
10671 ],
10672 );
10673
10674 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10675 let repository_updates = Arc::new(Mutex::new(Vec::new()));
10676 let project_events = Arc::new(Mutex::new(Vec::new()));
10677 project.update(cx, |project, cx| {
10678 let repository_updates = repository_updates.clone();
10679 cx.subscribe(project.git_store(), move |_, _, e, _| {
10680 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
10681 repository_updates.lock().push(e.clone());
10682 }
10683 })
10684 .detach();
10685 let project_events = project_events.clone();
10686 cx.subscribe_self(move |_, e, _| {
10687 if let Event::WorktreeUpdatedEntries(_, updates) = e {
10688 project_events.lock().extend(
10689 updates
10690 .iter()
10691 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
10692 .filter(|(path, _)| path != "fs-event-sentinel"),
10693 );
10694 }
10695 })
10696 .detach();
10697 });
10698
10699 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10700 tree.update(cx, |tree, cx| {
10701 tree.load_file(rel_path("target/debug/foo.txt"), cx)
10702 })
10703 .await
10704 .unwrap();
10705 tree.flush_fs_events(cx).await;
10706 project
10707 .update(cx, |project, cx| project.git_scans_complete(cx))
10708 .await;
10709 cx.run_until_parked();
10710 tree.update(cx, |tree, _| {
10711 assert_eq!(
10712 tree.entries(true, 0)
10713 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10714 .collect::<Vec<_>>(),
10715 vec![
10716 (rel_path(""), false),
10717 (rel_path(".gitignore"), false),
10718 (rel_path("src"), false),
10719 (rel_path("src/main.rs"), false),
10720 (rel_path("target"), true),
10721 (rel_path("target/debug"), true),
10722 (rel_path("target/debug/deps"), true),
10723 (rel_path("target/debug/foo.txt"), true),
10724 ]
10725 );
10726 });
10727
10728 assert_eq!(
10729 repository_updates.lock().drain(..).collect::<Vec<_>>(),
10730 vec![
10731 RepositoryEvent::BranchChanged,
10732 RepositoryEvent::StatusesChanged,
10733 RepositoryEvent::StatusesChanged,
10734 ],
10735 "Initial worktree scan should produce a repo update event"
10736 );
10737 assert_eq!(
10738 project_events.lock().drain(..).collect::<Vec<_>>(),
10739 vec![
10740 ("target".to_string(), PathChange::Loaded),
10741 ("target/debug".to_string(), PathChange::Loaded),
10742 ("target/debug/deps".to_string(), PathChange::Loaded),
10743 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
10744 ],
10745 "All non-ignored entries and all opened firs should be getting a project event",
10746 );
10747
10748 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
10749 // This may happen multiple times during a single flycheck, but once is enough for testing.
10750 fs.emit_fs_event("/root/target/debug/deps", None);
10751 tree.flush_fs_events(cx).await;
10752 project
10753 .update(cx, |project, cx| project.git_scans_complete(cx))
10754 .await;
10755 cx.executor().run_until_parked();
10756
10757 assert_eq!(
10758 repository_updates
10759 .lock()
10760 .iter()
10761 .cloned()
10762 .collect::<Vec<_>>(),
10763 Vec::new(),
10764 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
10765 );
10766 assert_eq!(
10767 project_events.lock().as_slice(),
10768 Vec::new(),
10769 "No further project events should happen, as only ignored dirs received FS events",
10770 );
10771}
10772
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // A repository discovered through an *invisible* worktree (created here
    // for the single file `/root/dir1/b.txt`) must not be added to the
    // project's repository list; only the visible worktree's repo appears.
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Initially only dep1's repository is reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Create an invisible (non-visible) worktree for a file that lives in the
    // outer repository...
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // ...and confirm the repository list is unchanged.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
10834
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Exercises ignore handling across rescans: entries covered by an
    // ancestor .gitignore (outside the repo root), entries covered by the
    // repo's own .gitignore, and newly created staged files.
    init_test(cx);
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                // Disable scan exclusions so ignored entries remain visible
                // to the worktree for the assertions below.
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the contents of the ignored dir to be loaded into the worktree.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        // Clean tracked file: no status, not ignored.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        // Named by the ancestor .gitignore, which sits outside the repo root —
        // asserted not ignored within the repo.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        // Covered by the repo's own .gitignore.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files: one that also gets staged, one covered by the
    // ancestor ignore file, and one inside the ignored dir.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // In the index but not in HEAD: shows as Added.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is reported as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10975
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Repositories reached via a linked git worktree (a `.git` *file*
    // pointing at `.git/worktrees/...`) and via a submodule (a `.git` file
    // pointing at `.git/modules/...`) should both be discovered, and both
    // should pick up subsequent git-state changes.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories (outer, linked worktree, submodule) are found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        // The buffer should resolve to the linked worktree's repository,
        // not the outer `/project` repository.
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    // Wait for the repository to finish processing pending updates.
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        // On-disk content "B" differs from the committed/staged "b".
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
11131
11132#[gpui::test]
11133async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
11134 init_test(cx);
11135 let fs = FakeFs::new(cx.background_executor.clone());
11136 fs.insert_tree(
11137 path!("/root"),
11138 json!({
11139 "project": {
11140 ".git": {},
11141 "child1": {
11142 "a.txt": "A",
11143 },
11144 "child2": {
11145 "b.txt": "B",
11146 }
11147 }
11148 }),
11149 )
11150 .await;
11151
11152 let project = Project::test(
11153 fs.clone(),
11154 [
11155 path!("/root/project/child1").as_ref(),
11156 path!("/root/project/child2").as_ref(),
11157 ],
11158 cx,
11159 )
11160 .await;
11161
11162 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11163 tree.flush_fs_events(cx).await;
11164 project
11165 .update(cx, |project, cx| project.git_scans_complete(cx))
11166 .await;
11167 cx.executor().run_until_parked();
11168
11169 let repos = project.read_with(cx, |project, cx| {
11170 project
11171 .repositories(cx)
11172 .values()
11173 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
11174 .collect::<Vec<_>>()
11175 });
11176 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
11177}
11178
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    // When a buffer is saved under a new path, its unstaged and uncommitted
    // diffs should be recomputed against the *new* path's index and HEAD
    // contents.
    init_test(cx);

    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    // Give each file distinct committed (HEAD) and staged (index) contents so
    // we can tell which base a diff was computed against.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Make the buffer contents differ from both staged and committed text.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // The uncommitted diff (vs HEAD) should likewise use file_2's committed text.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11292
11293async fn search(
11294 project: &Entity<Project>,
11295 query: SearchQuery,
11296 cx: &mut gpui::TestAppContext,
11297) -> Result<HashMap<String, Vec<Range<usize>>>> {
11298 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11299 let mut results = HashMap::default();
11300 while let Ok(search_result) = search_rx.rx.recv().await {
11301 match search_result {
11302 SearchResult::Buffer { buffer, ranges } => {
11303 results.entry(buffer).or_insert(ranges);
11304 }
11305 SearchResult::LimitReached => {}
11306 }
11307 }
11308 Ok(results
11309 .into_iter()
11310 .map(|(buffer, ranges)| {
11311 buffer.update(cx, |buffer, cx| {
11312 let path = buffer
11313 .file()
11314 .unwrap()
11315 .full_path(cx)
11316 .to_string_lossy()
11317 .to_string();
11318 let ranges = ranges
11319 .into_iter()
11320 .map(|range| range.to_offset(buffer))
11321 .collect::<Vec<_>>();
11322 (path, ranges)
11323 })
11324 })
11325 .collect())
11326}
11327
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    // Verifies that reloading a buffer with a different encoding is an
    // undoable operation: undo restores both the original encoding and text,
    // and redo re-applies the re-interpretation — with the buffer staying
    // clean (not dirty) throughout, since the bytes on disk never change.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    // Initial state: detected as UTF-8, decoded to "Hi", clean.
    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    // Reload the same bytes, forcing a UTF-16LE interpretation.
    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    // Drain pending foreground/background work so the reload can make
    // progress before we block on its receiver.
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    // 0x48 0x69 read as one little-endian UTF-16 code unit (0x6948).
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    // Undo restores the original encoding and text, and the buffer is clean.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    // Redo re-applies the UTF-16LE interpretation; only assert the text
    // differs from the original rather than pinning the exact character.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11391
/// Shared per-test setup: initializes logging, installs a test
/// `SettingsStore` as a global, and initializes the release channel.
/// Must run before any code that reads global settings.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        // The settings store must be created before being set as a global,
        // since `SettingsStore::test` itself needs `cx`.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(semver::Version::new(0, 0, 0), cx);
    });
}
11401
11402fn json_lang() -> Arc<Language> {
11403 Arc::new(Language::new(
11404 LanguageConfig {
11405 name: "JSON".into(),
11406 matcher: LanguageMatcher {
11407 path_suffixes: vec!["json".to_string()],
11408 ..Default::default()
11409 },
11410 ..Default::default()
11411 },
11412 None,
11413 ))
11414}
11415
11416fn js_lang() -> Arc<Language> {
11417 Arc::new(Language::new(
11418 LanguageConfig {
11419 name: "JavaScript".into(),
11420 matcher: LanguageMatcher {
11421 path_suffixes: vec!["js".to_string()],
11422 ..Default::default()
11423 },
11424 ..Default::default()
11425 },
11426 None,
11427 ))
11428}
11429
/// A "Python" test language (no grammar) with a fake toolchain lister that
/// reports a virtual environment for every `.venv` directory found on the
/// fake filesystem along the ancestors of the queried path.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // Report one toolchain for every ancestor of the subroot-relative
            // path that contains a `.venv` directory on the fake filesystem.
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is intentionally unimplemented for this fake lister.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed in tests.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
11504
11505fn typescript_lang() -> Arc<Language> {
11506 Arc::new(Language::new(
11507 LanguageConfig {
11508 name: "TypeScript".into(),
11509 matcher: LanguageMatcher {
11510 path_suffixes: vec!["ts".to_string()],
11511 ..Default::default()
11512 },
11513 ..Default::default()
11514 },
11515 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
11516 ))
11517}
11518
11519fn tsx_lang() -> Arc<Language> {
11520 Arc::new(Language::new(
11521 LanguageConfig {
11522 name: "tsx".into(),
11523 matcher: LanguageMatcher {
11524 path_suffixes: vec!["tsx".to_string()],
11525 ..Default::default()
11526 },
11527 ..Default::default()
11528 },
11529 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
11530 ))
11531}
11532
11533fn get_all_tasks(
11534 project: &Entity<Project>,
11535 task_contexts: Arc<TaskContexts>,
11536 cx: &mut App,
11537) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
11538 let new_tasks = project.update(cx, |project, cx| {
11539 project.task_store().update(cx, |task_store, cx| {
11540 task_store.task_inventory().unwrap().update(cx, |this, cx| {
11541 this.used_and_current_resolved_tasks(task_contexts, cx)
11542 })
11543 })
11544 });
11545
11546 cx.background_spawn(async move {
11547 let (mut old, new) = new_tasks.await;
11548 old.extend(new);
11549 old
11550 })
11551}
11552
/// Asserts that the worktree entry at `path` has the expected git index
/// status (`None` means no tracked status) and ignore flag, for a worktree
/// whose root is the repository's working directory.
#[track_caller]
fn assert_entry_git_state(
    tree: &Worktree,
    repository: &Repository,
    path: &str,
    index_status: Option<StatusCode>,
    is_ignored: bool,
) {
    // This helper only supports the case where the worktree root IS the
    // repository working directory.
    assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
    let entry = tree
        .entry_for_path(&rel_path(path))
        .unwrap_or_else(|| panic!("entry {path} not found"));
    let status = repository
        .status_for_path(&repo_path(path))
        .map(|entry| entry.status);
    // Expected status is always "unmodified in the worktree" with the given
    // index status; callers pass `None` to assert no status at all.
    let expected = index_status.map(|index_status| {
        TrackedStatus {
            index_status,
            worktree_status: StatusCode::Unmodified,
        }
        .into()
    });
    assert_eq!(
        status, expected,
        "expected {path} to have git status: {expected:?}"
    );
    assert_eq!(
        entry.is_ignored, is_ignored,
        "expected {path} to have is_ignored: {is_ignored}"
    );
}
11584
11585#[track_caller]
11586fn git_init(path: &Path) -> git2::Repository {
11587 let mut init_opts = RepositoryInitOptions::new();
11588 init_opts.initial_head("main");
11589 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
11590}
11591
11592#[track_caller]
11593fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
11594 let path = path.as_ref();
11595 let mut index = repo.index().expect("Failed to get index");
11596 index.add_path(path).expect("Failed to add file");
11597 index.write().expect("Failed to write index");
11598}
11599
11600#[track_caller]
11601fn git_remove_index(path: &Path, repo: &git2::Repository) {
11602 let mut index = repo.index().expect("Failed to get index");
11603 index.remove_path(path).expect("Failed to add file");
11604 index.write().expect("Failed to write index");
11605}
11606
11607#[track_caller]
11608fn git_commit(msg: &'static str, repo: &git2::Repository) {
11609 use git2::Signature;
11610
11611 let signature = Signature::now("test", "test@zed.dev").unwrap();
11612 let oid = repo.index().unwrap().write_tree().unwrap();
11613 let tree = repo.find_tree(oid).unwrap();
11614 if let Ok(head) = repo.head() {
11615 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
11616
11617 let parent_commit = parent_obj.as_commit().unwrap();
11618
11619 repo.commit(
11620 Some("HEAD"),
11621 &signature,
11622 &signature,
11623 msg,
11624 &tree,
11625 &[parent_commit],
11626 )
11627 .expect("Failed to commit with parent");
11628 } else {
11629 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
11630 .expect("Failed to commit");
11631 }
11632}
11633
// Cherry-picks `commit` onto the current HEAD.
// NOTE: `#[cfg(any())]` never matches, so this helper is compiled out; it is
// kept for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
11639
11640#[track_caller]
11641fn git_stash(repo: &mut git2::Repository) {
11642 use git2::Signature;
11643
11644 let signature = Signature::now("test", "test@zed.dev").unwrap();
11645 repo.stash_save(&signature, "N/A", None)
11646 .expect("Failed to stash");
11647}
11648
11649#[track_caller]
11650fn git_reset(offset: usize, repo: &git2::Repository) {
11651 let head = repo.head().expect("Couldn't get repo head");
11652 let object = head.peel(git2::ObjectType::Commit).unwrap();
11653 let commit = object.as_commit().unwrap();
11654 let new_head = commit
11655 .parents()
11656 .inspect(|parnet| {
11657 parnet.message();
11658 })
11659 .nth(offset)
11660 .expect("Not enough history");
11661 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
11662 .expect("Could not reset");
11663}
11664
/// Creates branch `name` pointing at the current HEAD commit.
/// NOTE: `#[cfg(any())]` never matches, so this helper is compiled out; it is
/// kept for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed expect message: it previously said "Failed to commit", a
    // copy-paste error — this call creates a branch.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
11675
// Checks out the ref named `name`: points HEAD at it, then updates the
// working tree to match.
// NOTE: `#[cfg(any())]` never matches, so this helper is compiled out; it is
// kept for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
11682
// Returns every entry's git status, keyed by its repo-relative path.
// NOTE: `#[cfg(any())]` never matches, so this helper is compiled out; it is
// kept for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}
11692
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two separate worktrees rooted at project1 and project2.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute path and id. NOTE: this assumes the
    // worktrees are returned in the order they were added above.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Existing file at a worktree root resolves to that worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested file resolves with its full worktree-relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // Files in the second worktree resolve to the second worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path inside a worktree resolves even if no file exists there yet.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
11776
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    // Verifies that removing worktrees updates the set of tracked git
    // repositories and the active repository: a repo stays tracked while any
    // worktree still covers it, and the active repository falls back to the
    // next available one (or None) as worktrees are removed.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Three worktrees: /root/a, /root/b/script (inside repo b), and /root/b.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    // Index the worktree ids by their absolute paths so removals below can be
    // expressed by path instead of by iteration order.
    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Only two distinct repositories exist (a and b), despite three worktrees.
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the /root/b/script worktree must NOT drop repo b, because the
    // /root/b worktree still covers it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing /root/a drops repo a; the active repository falls back to b.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the last worktree leaves no active repository at all.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
11889
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    // Verifies the optimistic-staging UI state machine for a diff hunk:
    // unstaged -> removal-pending (while the stage job runs) -> staged,
    // and that a subsequent "commit" (HEAD update) clears the hunk entirely.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index both hold the committed contents, so the on-disk edit
    // ("two" -> "TWO") appears as a single unstaged modification.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        // HasSecondaryHunk means the job hasn't started yet; keep ticking.
        // SecondaryHunkRemovalPending is the optimistic state we want to see.
        // NoSecondaryHunk here would mean the optimistic step was skipped.
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
12034
#[gpui::test]
async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
    // Verifies that buffers whose paths match any `read_only_files` glob are
    // opened read-only, while non-matching buffers remain writable.
    init_test(cx);

    // Configure read_only_files setting
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/generated/**".to_string(),
                    "**/*.gen.rs".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
                "types.gen.rs": "// Generated file",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Open a regular file - should be read-write
    let regular_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    regular_buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Regular file should not be read-only");
    });

    // Open a file matching *.gen.rs pattern - should be read-only
    let gen_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
        })
        .await
        .unwrap();

    gen_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File matching *.gen.rs pattern should be read-only"
        );
    });

    // Open a file in generated directory - should be read-only
    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File in generated directory should be read-only"
        );
    });
}
12110
#[gpui::test]
async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
    // Verifies that an explicitly empty `read_only_files` list leaves every
    // buffer writable, including files in directories that other tests mark
    // read-only.
    init_test(cx);

    // Explicitly set read_only_files to empty (default behavior)
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // All files should be read-write when read_only_files is empty
    let main_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    main_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Files should not be read-only when read_only_files is empty"
        );
    });

    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Generated files should not be read-only when read_only_files is empty"
        );
    });
}
12169
#[gpui::test]
async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
    // Verifies `read_only_files` globs targeting lock files: matching lock
    // files open read-only while their sibling manifests stay writable.
    init_test(cx);

    // Configure to make lock files read-only
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/*.lock".to_string(),
                    "**/package-lock.json".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "Cargo.lock": "# Lock file",
            "Cargo.toml": "[package]",
            "package-lock.json": "{}",
            "package.json": "{}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Cargo.lock should be read-only
    let cargo_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.lock"), cx)
        })
        .await
        .unwrap();

    cargo_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "Cargo.lock should be read-only");
    });

    // Cargo.toml should be read-write
    let cargo_toml = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    cargo_toml.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
    });

    // package-lock.json should be read-only
    let package_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package-lock.json"), cx)
        })
        .await
        .unwrap();

    package_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "package-lock.json should be read-only");
    });

    // package.json should be read-write
    let package_json = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package.json"), cx)
        })
        .await
        .unwrap();

    package_json.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "package.json should not be read-only");
    });
}
12248
/// Tests for the `disable_ai` setting's "saturating" semantics: once any
/// settings layer (global, user, or project) sets it to true, a lower-priority
/// layer setting it to false cannot re-enable AI.
mod disable_ai_settings_tests {
    use gpui::TestAppContext;
    use project::*;
    use settings::{Settings, SettingsStore};

    #[gpui::test]
    async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
        cx.update(|cx| {
            settings::init(cx);

            // Test 1: Default is false (AI enabled)
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Default should allow AI"
            );
        });

        let disable_true = serde_json::json!({
            "disable_ai": true
        })
        .to_string();
        let disable_false = serde_json::json!({
            "disable_ai": false
        })
        .to_string();

        // Test 2: global=true must win over user=false.
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_user_settings(&disable_false, cx).unwrap();
            store.set_global_settings(&disable_true, cx).unwrap();
        });
        cx.update(|cx| {
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Local false cannot override global true"
            );
        });

        // Test 3: the reverse layering — user=true, global=false — must also
        // leave AI disabled (true saturates regardless of which layer set it).
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_global_settings(&disable_false, cx).unwrap();
            store.set_user_settings(&disable_true, cx).unwrap();
        });

        cx.update(|cx| {
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Local false cannot override global true"
            );
        });
    }

    #[gpui::test]
    async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
        use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
        use worktree::WorktreeId;

        cx.update(|cx| {
            settings::init(cx);

            // Default should allow AI
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Default should allow AI"
            );
        });

        // Build a synthetic settings location (worktree 1, path "project")
        // without creating a real worktree.
        let worktree_id = WorktreeId::from_usize(1);
        let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
            std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
        };
        let project_path = rel_path("project");
        let settings_location = SettingsLocation {
            worktree_id,
            path: project_path.as_ref(),
        };

        // Test: Project-level disable_ai=true should disable AI for files in that project
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": true }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                settings.disable_ai,
                "Project-level disable_ai=true should disable AI for files in that project"
            );
            // Global should now also be true since project-level disable_ai is merged into global
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Global setting should be affected by project-level disable_ai=true"
            );
        });

        // Test: Setting project-level to false should allow AI for that project
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": false }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                !settings.disable_ai,
                "Project-level disable_ai=false should allow AI"
            );
            // Global should also be false now
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Global setting should be false when project-level is false"
            );
        });

        // Test: User-level true + project-level false = AI disabled (saturation)
        let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_user_settings(&disable_true, cx).unwrap();
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": false }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                settings.disable_ai,
                "Project-level false cannot override user-level true (SaturatingBool)"
            );
        });
    }
}