1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::{FakeFs, PathEventKind};
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
45 LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
46 ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
47 language_settings::{LanguageSettingsContent, language_settings},
48 markdown_lang, rust_lang, tree_sitter_typescript,
49};
50use lsp::{
51 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
52 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
53 Uri, WillRenameFiles, notification::DidRenameFiles,
54};
55use parking_lot::Mutex;
56use paths::{config_dir, global_gitignore_path, tasks_file};
57use postage::stream::Stream as _;
58use pretty_assertions::{assert_eq, assert_matches};
59use project::{
60 Event, TaskContexts,
61 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
62 search::{SearchQuery, SearchResult},
63 task_store::{TaskSettingsLocation, TaskStore},
64 *,
65};
66use rand::{Rng as _, rngs::StdRng};
67use serde_json::json;
68use settings::SettingsStore;
69#[cfg(not(windows))]
70use std::os;
71use std::{
72 cell::RefCell,
73 env, mem,
74 num::NonZeroU32,
75 ops::Range,
76 path::{Path, PathBuf},
77 rc::Rc,
78 str::FromStr,
79 sync::{Arc, OnceLock},
80 task::Poll,
81 time::Duration,
82};
83use sum_tree::SumTree;
84use task::{ResolvedTask, ShellKind, TaskContext};
85use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
86use unindent::Unindent as _;
87use util::{
88 TryFutureExt as _, assert_set_eq, maybe, path,
89 paths::{PathMatcher, PathStyle},
90 rel_path::{RelPath, rel_path},
91 test::{TempTree, marked_text_offsets},
92 uri,
93};
94use worktree::WorktreeModelHandle as _;
95
96#[gpui::test]
97async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
98 cx.executor().allow_parking();
99
100 let (tx, mut rx) = futures::channel::mpsc::unbounded();
101 let _thread = std::thread::spawn(move || {
102 #[cfg(not(target_os = "windows"))]
103 std::fs::metadata("/tmp").unwrap();
104 #[cfg(target_os = "windows")]
105 std::fs::metadata("C:/Windows").unwrap();
106 std::thread::sleep(Duration::from_millis(1000));
107 tx.unbounded_send(1).unwrap();
108 });
109 rx.next().await.unwrap();
110}
111
112#[gpui::test]
113async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
114 cx.executor().allow_parking();
115
116 let io_task = smol::unblock(move || {
117 println!("sleeping on thread {:?}", std::thread::current().id());
118 std::thread::sleep(Duration::from_millis(10));
119 1
120 });
121
122 let task = cx.foreground_executor().spawn(async move {
123 io_task.await;
124 });
125
126 task.await;
127}
128
129#[gpui::test]
130async fn test_default_session_work_dirs_prefers_directory_worktrees_over_single_file_parents(
131 cx: &mut gpui::TestAppContext,
132) {
133 init_test(cx);
134
135 let fs = FakeFs::new(cx.executor());
136 fs.insert_tree(
137 path!("/root"),
138 json!({
139 "dir-project": {
140 "src": {
141 "main.rs": "fn main() {}"
142 }
143 },
144 "single-file.rs": "fn helper() {}"
145 }),
146 )
147 .await;
148
149 let project = Project::test(
150 fs,
151 [
152 Path::new(path!("/root/single-file.rs")),
153 Path::new(path!("/root/dir-project")),
154 ],
155 cx,
156 )
157 .await;
158
159 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
160 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
161
162 assert_eq!(
163 ordered_paths,
164 vec![
165 PathBuf::from(path!("/root/dir-project")),
166 PathBuf::from(path!("/root")),
167 ]
168 );
169}
170
171#[gpui::test]
172async fn test_default_session_work_dirs_falls_back_to_home_for_empty_project(
173 cx: &mut gpui::TestAppContext,
174) {
175 init_test(cx);
176
177 let fs = FakeFs::new(cx.executor());
178 let project = Project::test(fs, [], cx).await;
179
180 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
181 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
182
183 assert_eq!(ordered_paths, vec![paths::home_dir().to_path_buf()]);
184}
185
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
// we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    // Verifies that a worktree opened through a symlinked root is scanned
    // correctly, and that a symlinked subdirectory exposes the same underlying
    // files (same inode) as the directory it points at.
    init_test(cx);
    // Real filesystem I/O below; allow the test executor to park.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // One symlink to the whole tree root, and one symlinked sibling of
    // "fennel" inside the tree.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project *through* the symlinked root, on the real filesystem.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // apple, date, endive, grape — plus grape again via the "finnochio" symlink.
        assert_eq!(tree.file_count(), 5);
        // The symlinked directory's file shares an inode with the original.
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
236
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies .editorconfig handling inside a worktree:
    // - .editorconfig values override .zed/settings.json,
    // - nested .editorconfig files override ancestors,
    // - "tab_width" is used when "indent_size" is absent,
    // - "max_line_length = off" falls back to .zed settings,
    // - glob sections only apply to matching file types.
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n    A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n  B\n}",
        },
        "c.js": "def c\n  C\nend",
        "d": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 1
            "#,
            "d.rs": "fn d() {\n D\n}",
        },
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    // Mirror the temp tree into the fake filesystem so the project sees it.
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let worktree scanning and settings loading settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a file in the worktree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_d = settings_for("d/d.rs");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in subdirectory overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
        assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs";
        // it keeps tab_size = 8 from .zed/settings.json.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
344
#[gpui::test]
async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies that .editorconfig files in ancestor directories *outside* the
    // worktree root are discovered and applied, and that more specific
    // sections (nearer the file) win over more general ancestor ones.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "parent": {
                ".editorconfig": "[*.rs]\nindent_size = 2\n",
                "worktree": {
                    ".editorconfig": "[*.md]\nindent_size = 3\n",
                    "main.rs": "fn main() {}",
                    "README.md": "# README",
                    "other.txt": "other content",
                }
            }
        }),
    )
    .await;

    // Only the innermost directory is opened as the worktree.
    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    language_registry.add(markdown_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let worktree scanning and settings loading settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a file in the worktree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_rs = settings_for("main.rs");
        let settings_md = settings_for("README.md");
        let settings_txt = settings_for("other.txt");

        // main.rs gets indent_size = 2 from parent's external .editorconfig
        assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));

        // README.md gets indent_size = 3 from internal worktree .editorconfig
        assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));

        // other.txt gets indent_size = 4 from grandparent's external .editorconfig
        assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
    });
}
408
409#[gpui::test]
410async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
411 init_test(cx);
412
413 let fs = FakeFs::new(cx.executor());
414 fs.insert_tree(
415 path!("/worktree"),
416 json!({
417 ".editorconfig": "[*]\nindent_size = 99\n",
418 "src": {
419 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
420 "file.rs": "fn main() {}",
421 }
422 }),
423 )
424 .await;
425
426 let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;
427
428 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
429 language_registry.add(rust_lang());
430
431 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
432
433 cx.executor().run_until_parked();
434
435 cx.update(|cx| {
436 let tree = worktree.read(cx);
437 let file_entry = tree
438 .entry_for_path(rel_path("src/file.rs"))
439 .unwrap()
440 .clone();
441 let file = File::for_entry(file_entry, worktree.clone());
442 let file_language = project
443 .read(cx)
444 .languages()
445 .load_language_for_file_path(file.path.as_std_path());
446 let file_language = cx
447 .foreground_executor()
448 .block_on(file_language)
449 .expect("Failed to get file language");
450 let file = file as _;
451 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
452
453 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
454 });
455}
456
457#[gpui::test]
458async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
459 init_test(cx);
460
461 let fs = FakeFs::new(cx.executor());
462 fs.insert_tree(
463 path!("/parent"),
464 json!({
465 ".editorconfig": "[*]\nindent_size = 99\n",
466 "worktree": {
467 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
468 "file.rs": "fn main() {}",
469 }
470 }),
471 )
472 .await;
473
474 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
475
476 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
477 language_registry.add(rust_lang());
478
479 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
480
481 cx.executor().run_until_parked();
482
483 cx.update(|cx| {
484 let tree = worktree.read(cx);
485 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
486 let file = File::for_entry(file_entry, worktree.clone());
487 let file_language = project
488 .read(cx)
489 .languages()
490 .load_language_for_file_path(file.path.as_std_path());
491 let file_language = cx
492 .foreground_executor()
493 .block_on(file_language)
494 .expect("Failed to get file language");
495 let file = file as _;
496 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
497
498 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
499 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
500 });
501}
502
503#[gpui::test]
504async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
505 init_test(cx);
506
507 let fs = FakeFs::new(cx.executor());
508 fs.insert_tree(
509 path!("/grandparent"),
510 json!({
511 ".editorconfig": "[*]\nindent_size = 99\n",
512 "parent": {
513 ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
514 "worktree": {
515 "file.rs": "fn main() {}",
516 }
517 }
518 }),
519 )
520 .await;
521
522 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
523
524 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
525 language_registry.add(rust_lang());
526
527 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
528
529 cx.executor().run_until_parked();
530
531 cx.update(|cx| {
532 let tree = worktree.read(cx);
533 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
534 let file = File::for_entry(file_entry, worktree.clone());
535 let file_language = project
536 .read(cx)
537 .languages()
538 .load_language_for_file_path(file.path.as_std_path());
539 let file_language = cx
540 .foreground_executor()
541 .block_on(file_language)
542 .expect("Failed to get file language");
543 let file = file as _;
544 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
545
546 // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
547 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
548 });
549}
550
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    // Verifies that a single external .editorconfig in a shared parent
    // directory applies to every worktree beneath it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    // Open both sibling directories as separate worktrees of one project.
    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    // Let worktree scanning and settings loading settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        for worktree in worktrees {
            let tree = worktree.read(cx);
            let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            let settings =
                language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        }
    });
}
612
613#[gpui::test]
614async fn test_external_editorconfig_not_loaded_without_internal_config(
615 cx: &mut gpui::TestAppContext,
616) {
617 init_test(cx);
618
619 let fs = FakeFs::new(cx.executor());
620 fs.insert_tree(
621 path!("/parent"),
622 json!({
623 ".editorconfig": "[*]\nindent_size = 99\n",
624 "worktree": {
625 "file.rs": "fn main() {}",
626 }
627 }),
628 )
629 .await;
630
631 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
632
633 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
634 language_registry.add(rust_lang());
635
636 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
637
638 cx.executor().run_until_parked();
639
640 cx.update(|cx| {
641 let tree = worktree.read(cx);
642 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
643 let file = File::for_entry(file_entry, worktree.clone());
644 let file_language = project
645 .read(cx)
646 .languages()
647 .load_language_for_file_path(file.path.as_std_path());
648 let file_language = cx
649 .foreground_executor()
650 .block_on(file_language)
651 .expect("Failed to get file language");
652 let file = file as _;
653 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
654
655 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
656 // because without an internal .editorconfig, external configs are not loaded
657 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
658 });
659}
660
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    // Verifies that editing an external (outside-the-worktree) .editorconfig
    // on disk is observed and causes the effective settings to refresh.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    // Keep a handle on `fs` so the test can modify the external config below.
    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let worktree scanning and settings loading settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the external config on disk; the watcher should pick this up.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
734
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    // Verifies that a worktree added to an existing project discovers the
    // external .editorconfig in its ancestor directories, just like worktrees
    // present at project creation.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    // Start with only one of the two sibling directories open.
    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktree = project.read(cx).worktrees(cx).next().unwrap();
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Now add the second sibling directory as a new worktree.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = new_worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, new_worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
811
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    // Verifies that removing a worktree tears down its editorconfig state:
    // the worktree entry, the cached external configs, and the file watchers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    // Let worktree scanning and settings loading settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        // test_state() exposes (tracked worktree ids, external config paths,
        // watched paths) for assertions.
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
867
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    // Verifies reference-counted cleanup: when two worktrees share one
    // external .editorconfig, removing one worktree must NOT drop the shared
    // config or its watcher while the other worktree still needs it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    // Let worktree scanning and settings loading settle.
    cx.executor().run_until_parked();

    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    cx.update(|cx| {
        let tree = worktree_b.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree_b.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
965
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    // Verifies that the "git_hosting_providers" project setting registers a
    // custom provider in the global registry, and that removing the setting
    // unregisters it again.
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    // Let settings file loading settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        // The provider named "foo" from project settings should be registered.
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the project settings on disk.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        // With the setting removed, the custom provider should be gone.
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
1030
// Verifies directory-scoped project configuration: a nested `b/.zed/settings.json`
// overrides the worktree-root `.zed/settings.json` for files under `b/`, task
// definitions from both `.zed` directories are surfaced, and global file-based
// tasks are merged in — with the most recently scheduled task sorted first.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Worktree layout: root `.zed` (tab_size 8, "cargo check all" task) and a
    // nested `b/.zed` (tab_size 2, "cargo check" task).
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the active worktree context only.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for tasks coming from the worktree-root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            // `a/a.rs` inherits the root settings; `b/b.rs` gets the nested override.
            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Tasks from both `.zed` directories are discovered.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Schedule the root `.zed` task, then register a global tasks.json entry.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store()
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // After scheduling, the recently-used root task sorts first, followed by the
    // nested worktree task and then the newly added global task (with its env).
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
1231
// Verifies that a `.zed/tasks.json` referencing an unknown task variable
// (`$ZED_FOO`) emits an `Event::Toast` whose link points at the tasks docs.
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we had a chance to setup the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    // Subscribe before parking the executor so the toast produced by the save
    // above is observed when pending work is flushed.
    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1289
// Verifies that a task using `$ZED_WORKTREE_ROOT` only resolves when a worktree
// context supplying that variable is present: with only an active-item context
// the task cannot resolve; with an active worktree context it resolves and the
// variable is substituted into the command.
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // No worktree context at all: only an item context without task variables.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Same item context, but now with a worktree context that provides
    // `ZED_WORKTREE_ROOT` — the task should resolve against it.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            // The variable was substituted with the worktree root.
            "echo /dir".to_string(),
        )]
    );
}
1381
// Verifies per-subproject language-server instances within a single worktree:
// two pyproject-rooted subprojects initially share one server instance
// (LanguageServerId(0)); after activating a different toolchain for one
// subproject, that subproject gets its own server instance (LanguageServerId(1)).
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: a directory containing `pyproject.toml`
    // (searched upward through `depth` ancestors) is a project root.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two subprojects, each with its own `pyproject.toml` and `.venv`.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance for now.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery roots at project-b's pyproject.toml directory.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    // No toolchain was explicitly activated yet.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // Re-query the servers for project-b's buffer after toolchain activation.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1583
// End-to-end check of language-server lifecycle management: servers start
// lazily when a matching buffer opens, buffers are configured from server
// capabilities, edits/saves/renames are routed only to the matching servers,
// cross-language renames move a buffer between servers (resetting its document
// version and clearing diagnostics), restarts reopen documents on fresh server
// instances, and dropping the last buffer handle sends a close notification.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server: advertises completion triggers and save notifications.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server with a different completion trigger set.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // The Rust server's next change notification is for test2.rs — the TOML
    // edit above never reached it.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-language rename is a close of the old URI + open of the new one.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared on the language change below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1986
1987#[gpui::test]
1988async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1989 init_test(cx);
1990
1991 let settings_json_contents = json!({
1992 "languages": {
1993 "Rust": {
1994 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1995 }
1996 },
1997 "lsp": {
1998 "my_fake_lsp": {
1999 "binary": {
2000 // file exists, so this is treated as a relative path
2001 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
2002 }
2003 },
2004 "lsp_on_path": {
2005 "binary": {
2006 // file doesn't exist, so it will fall back on PATH env var
2007 "path": path!("lsp_on_path.exe").to_string(),
2008 }
2009 }
2010 },
2011 });
2012
2013 let fs = FakeFs::new(cx.executor());
2014 fs.insert_tree(
2015 path!("/the-root"),
2016 json!({
2017 ".zed": {
2018 "settings.json": settings_json_contents.to_string(),
2019 },
2020 ".relative_path": {
2021 "to": {
2022 "my_fake_lsp.exe": "",
2023 },
2024 },
2025 "src": {
2026 "main.rs": "",
2027 }
2028 }),
2029 )
2030 .await;
2031
2032 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2033 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2034 language_registry.add(rust_lang());
2035
2036 let mut my_fake_lsp = language_registry.register_fake_lsp(
2037 "Rust",
2038 FakeLspAdapter {
2039 name: "my_fake_lsp",
2040 ..Default::default()
2041 },
2042 );
2043 let mut lsp_on_path = language_registry.register_fake_lsp(
2044 "Rust",
2045 FakeLspAdapter {
2046 name: "lsp_on_path",
2047 ..Default::default()
2048 },
2049 );
2050
2051 cx.run_until_parked();
2052
2053 // Start the language server by opening a buffer with a compatible file extension.
2054 project
2055 .update(cx, |project, cx| {
2056 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
2057 })
2058 .await
2059 .unwrap();
2060
2061 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
2062 assert_eq!(
2063 lsp_path.to_string_lossy(),
2064 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
2065 );
2066
2067 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
2068 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
2069}
2070
2071#[gpui::test]
2072async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2073 init_test(cx);
2074
2075 let settings_json_contents = json!({
2076 "languages": {
2077 "Rust": {
2078 "language_servers": ["tilde_lsp"]
2079 }
2080 },
2081 "lsp": {
2082 "tilde_lsp": {
2083 "binary": {
2084 "path": "~/.local/bin/rust-analyzer",
2085 }
2086 }
2087 },
2088 });
2089
2090 let fs = FakeFs::new(cx.executor());
2091 fs.insert_tree(
2092 path!("/root"),
2093 json!({
2094 ".zed": {
2095 "settings.json": settings_json_contents.to_string(),
2096 },
2097 "src": {
2098 "main.rs": "fn main() {}",
2099 }
2100 }),
2101 )
2102 .await;
2103
2104 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2105 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2106 language_registry.add(rust_lang());
2107
2108 let mut tilde_lsp = language_registry.register_fake_lsp(
2109 "Rust",
2110 FakeLspAdapter {
2111 name: "tilde_lsp",
2112 ..Default::default()
2113 },
2114 );
2115 cx.run_until_parked();
2116
2117 project
2118 .update(cx, |project, cx| {
2119 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2120 })
2121 .await
2122 .unwrap();
2123
2124 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2125 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2126 assert_eq!(
2127 lsp_path, expected_path,
2128 "Tilde path should expand to home directory"
2129 );
2130}
2131
// Verifies that a filesystem Rescan event for a file a language server watches
// (via `workspace/didChangeWatchedFiles`) is forwarded to the server as a
// CHANGED file event.
#[gpui::test]
async fn test_rescan_fs_change_is_reported_to_language_servers_as_changed(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, _lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the server by opening a Rust buffer.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The server registers a watcher for `Cargo.lock` and records every
    // DidChangeWatchedFiles notification it receives.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(
            lsp::RegistrationParams {
                registrations: vec![lsp::Registration {
                    id: Default::default(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: serde_json::to_value(
                        lsp::DidChangeWatchedFilesRegistrationOptions {
                            watchers: vec![lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            }],
                        },
                    )
                    .ok(),
                }],
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
        }
    });

    // Registering the watcher alone produces no change events.
    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);

    // A Rescan event on the watched path surfaces as a CHANGED file event.
    fs.emit_fs_event(path!("/the-root/Cargo.lock"), Some(PathEventKind::Rescan));
    cx.executor().run_until_parked();

    assert_eq!(
        &*file_changes.lock(),
        &[lsp::FileEvent {
            uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
            typ: lsp::FileChangeType::CHANGED,
        }]
    );
}
2222
2223#[gpui::test]
2224async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2225 init_test(cx);
2226
2227 let fs = FakeFs::new(cx.executor());
2228 fs.insert_tree(
2229 path!("/the-root"),
2230 json!({
2231 ".gitignore": "target\n",
2232 "Cargo.lock": "",
2233 "src": {
2234 "a.rs": "",
2235 "b.rs": "",
2236 },
2237 "target": {
2238 "x": {
2239 "out": {
2240 "x.rs": ""
2241 }
2242 },
2243 "y": {
2244 "out": {
2245 "y.rs": "",
2246 }
2247 },
2248 "z": {
2249 "out": {
2250 "z.rs": ""
2251 }
2252 }
2253 }
2254 }),
2255 )
2256 .await;
2257 fs.insert_tree(
2258 path!("/the-registry"),
2259 json!({
2260 "dep1": {
2261 "src": {
2262 "dep1.rs": "",
2263 }
2264 },
2265 "dep2": {
2266 "src": {
2267 "dep2.rs": "",
2268 }
2269 },
2270 }),
2271 )
2272 .await;
2273 fs.insert_tree(
2274 path!("/the/stdlib"),
2275 json!({
2276 "LICENSE": "",
2277 "src": {
2278 "string.rs": "",
2279 }
2280 }),
2281 )
2282 .await;
2283
2284 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2285 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2286 (project.languages().clone(), project.lsp_store())
2287 });
2288 language_registry.add(rust_lang());
2289 let mut fake_servers = language_registry.register_fake_lsp(
2290 "Rust",
2291 FakeLspAdapter {
2292 name: "the-language-server",
2293 ..Default::default()
2294 },
2295 );
2296
2297 cx.executor().run_until_parked();
2298
2299 // Start the language server by opening a buffer with a compatible file extension.
2300 project
2301 .update(cx, |project, cx| {
2302 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2303 })
2304 .await
2305 .unwrap();
2306
2307 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2308 project.update(cx, |project, cx| {
2309 let worktree = project.worktrees(cx).next().unwrap();
2310 assert_eq!(
2311 worktree
2312 .read(cx)
2313 .snapshot()
2314 .entries(true, 0)
2315 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2316 .collect::<Vec<_>>(),
2317 &[
2318 ("", false),
2319 (".gitignore", false),
2320 ("Cargo.lock", false),
2321 ("src", false),
2322 ("src/a.rs", false),
2323 ("src/b.rs", false),
2324 ("target", true),
2325 ]
2326 );
2327 });
2328
2329 let prev_read_dir_count = fs.read_dir_call_count();
2330
2331 let fake_server = fake_servers.next().await.unwrap();
2332 cx.executor().run_until_parked();
2333 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2334 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2335 id
2336 });
2337
2338 // Simulate jumping to a definition in a dependency outside of the worktree.
2339 let _out_of_worktree_buffer = project
2340 .update(cx, |project, cx| {
2341 project.open_local_buffer_via_lsp(
2342 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2343 server_id,
2344 cx,
2345 )
2346 })
2347 .await
2348 .unwrap();
2349
2350 // Keep track of the FS events reported to the language server.
2351 let file_changes = Arc::new(Mutex::new(Vec::new()));
2352 fake_server
2353 .request::<lsp::request::RegisterCapability>(
2354 lsp::RegistrationParams {
2355 registrations: vec![lsp::Registration {
2356 id: Default::default(),
2357 method: "workspace/didChangeWatchedFiles".to_string(),
2358 register_options: serde_json::to_value(
2359 lsp::DidChangeWatchedFilesRegistrationOptions {
2360 watchers: vec![
2361 lsp::FileSystemWatcher {
2362 glob_pattern: lsp::GlobPattern::String(
2363 path!("/the-root/Cargo.toml").to_string(),
2364 ),
2365 kind: None,
2366 },
2367 lsp::FileSystemWatcher {
2368 glob_pattern: lsp::GlobPattern::String(
2369 path!("/the-root/src/*.{rs,c}").to_string(),
2370 ),
2371 kind: None,
2372 },
2373 lsp::FileSystemWatcher {
2374 glob_pattern: lsp::GlobPattern::String(
2375 path!("/the-root/target/y/**/*.rs").to_string(),
2376 ),
2377 kind: None,
2378 },
2379 lsp::FileSystemWatcher {
2380 glob_pattern: lsp::GlobPattern::String(
2381 path!("/the/stdlib/src/**/*.rs").to_string(),
2382 ),
2383 kind: None,
2384 },
2385 lsp::FileSystemWatcher {
2386 glob_pattern: lsp::GlobPattern::String(
2387 path!("**/Cargo.lock").to_string(),
2388 ),
2389 kind: None,
2390 },
2391 ],
2392 },
2393 )
2394 .ok(),
2395 }],
2396 },
2397 DEFAULT_LSP_REQUEST_TIMEOUT,
2398 )
2399 .await
2400 .into_response()
2401 .unwrap();
2402 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2403 let file_changes = file_changes.clone();
2404 move |params, _| {
2405 let mut file_changes = file_changes.lock();
2406 file_changes.extend(params.changes);
2407 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2408 }
2409 });
2410
2411 cx.executor().run_until_parked();
2412 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2413 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2414
2415 let mut new_watched_paths = fs.watched_paths();
2416 new_watched_paths.retain(|path| {
2417 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2418 });
2419 assert_eq!(
2420 &new_watched_paths,
2421 &[
2422 Path::new(path!("/the-root")),
2423 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2424 Path::new(path!("/the/stdlib/src"))
2425 ]
2426 );
2427
2428 // Now the language server has asked us to watch an ignored directory path,
2429 // so we recursively load it.
2430 project.update(cx, |project, cx| {
2431 let worktree = project.visible_worktrees(cx).next().unwrap();
2432 assert_eq!(
2433 worktree
2434 .read(cx)
2435 .snapshot()
2436 .entries(true, 0)
2437 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2438 .collect::<Vec<_>>(),
2439 &[
2440 ("", false),
2441 (".gitignore", false),
2442 ("Cargo.lock", false),
2443 ("src", false),
2444 ("src/a.rs", false),
2445 ("src/b.rs", false),
2446 ("target", true),
2447 ("target/x", true),
2448 ("target/y", true),
2449 ("target/y/out", true),
2450 ("target/y/out/y.rs", true),
2451 ("target/z", true),
2452 ]
2453 );
2454 });
2455
2456 // Perform some file system mutations, two of which match the watched patterns,
2457 // and one of which does not.
2458 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2459 .await
2460 .unwrap();
2461 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2462 .await
2463 .unwrap();
2464 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2465 .await
2466 .unwrap();
2467 fs.create_file(
2468 path!("/the-root/target/x/out/x2.rs").as_ref(),
2469 Default::default(),
2470 )
2471 .await
2472 .unwrap();
2473 fs.create_file(
2474 path!("/the-root/target/y/out/y2.rs").as_ref(),
2475 Default::default(),
2476 )
2477 .await
2478 .unwrap();
2479 fs.save(
2480 path!("/the-root/Cargo.lock").as_ref(),
2481 &"".into(),
2482 Default::default(),
2483 )
2484 .await
2485 .unwrap();
2486 fs.save(
2487 path!("/the-stdlib/LICENSE").as_ref(),
2488 &"".into(),
2489 Default::default(),
2490 )
2491 .await
2492 .unwrap();
2493 fs.save(
2494 path!("/the/stdlib/src/string.rs").as_ref(),
2495 &"".into(),
2496 Default::default(),
2497 )
2498 .await
2499 .unwrap();
2500
2501 // The language server receives events for the FS mutations that match its watch patterns.
2502 cx.executor().run_until_parked();
2503 assert_eq!(
2504 &*file_changes.lock(),
2505 &[
2506 lsp::FileEvent {
2507 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2508 typ: lsp::FileChangeType::CHANGED,
2509 },
2510 lsp::FileEvent {
2511 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2512 typ: lsp::FileChangeType::DELETED,
2513 },
2514 lsp::FileEvent {
2515 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2516 typ: lsp::FileChangeType::CREATED,
2517 },
2518 lsp::FileEvent {
2519 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2520 typ: lsp::FileChangeType::CREATED,
2521 },
2522 lsp::FileEvent {
2523 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2524 typ: lsp::FileChangeType::CHANGED,
2525 },
2526 ]
2527 );
2528}
2529
2530#[gpui::test]
2531async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2532 init_test(cx);
2533
2534 let fs = FakeFs::new(cx.executor());
2535 fs.insert_tree(
2536 path!("/dir"),
2537 json!({
2538 "a.rs": "let a = 1;",
2539 "b.rs": "let b = 2;"
2540 }),
2541 )
2542 .await;
2543
2544 let project = Project::test(
2545 fs,
2546 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2547 cx,
2548 )
2549 .await;
2550 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2551
2552 let buffer_a = project
2553 .update(cx, |project, cx| {
2554 project.open_local_buffer(path!("/dir/a.rs"), cx)
2555 })
2556 .await
2557 .unwrap();
2558 let buffer_b = project
2559 .update(cx, |project, cx| {
2560 project.open_local_buffer(path!("/dir/b.rs"), cx)
2561 })
2562 .await
2563 .unwrap();
2564
2565 lsp_store.update(cx, |lsp_store, cx| {
2566 lsp_store
2567 .update_diagnostics(
2568 LanguageServerId(0),
2569 lsp::PublishDiagnosticsParams {
2570 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2571 version: None,
2572 diagnostics: vec![lsp::Diagnostic {
2573 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2574 severity: Some(lsp::DiagnosticSeverity::ERROR),
2575 message: "error 1".to_string(),
2576 ..Default::default()
2577 }],
2578 },
2579 None,
2580 DiagnosticSourceKind::Pushed,
2581 &[],
2582 cx,
2583 )
2584 .unwrap();
2585 lsp_store
2586 .update_diagnostics(
2587 LanguageServerId(0),
2588 lsp::PublishDiagnosticsParams {
2589 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2590 version: None,
2591 diagnostics: vec![lsp::Diagnostic {
2592 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2593 severity: Some(DiagnosticSeverity::WARNING),
2594 message: "error 2".to_string(),
2595 ..Default::default()
2596 }],
2597 },
2598 None,
2599 DiagnosticSourceKind::Pushed,
2600 &[],
2601 cx,
2602 )
2603 .unwrap();
2604 });
2605
2606 buffer_a.update(cx, |buffer, _| {
2607 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2608 assert_eq!(
2609 chunks
2610 .iter()
2611 .map(|(s, d)| (s.as_str(), *d))
2612 .collect::<Vec<_>>(),
2613 &[
2614 ("let ", None),
2615 ("a", Some(DiagnosticSeverity::ERROR)),
2616 (" = 1;", None),
2617 ]
2618 );
2619 });
2620 buffer_b.update(cx, |buffer, _| {
2621 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2622 assert_eq!(
2623 chunks
2624 .iter()
2625 .map(|(s, d)| (s.as_str(), *d))
2626 .collect::<Vec<_>>(),
2627 &[
2628 ("let ", None),
2629 ("b", Some(DiagnosticSeverity::WARNING)),
2630 (" = 2;", None),
2631 ]
2632 );
2633 });
2634}
2635
2636#[gpui::test]
2637async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2638 init_test(cx);
2639
2640 let fs = FakeFs::new(cx.executor());
2641 fs.insert_tree(
2642 path!("/root"),
2643 json!({
2644 "dir": {
2645 ".git": {
2646 "HEAD": "ref: refs/heads/main",
2647 },
2648 ".gitignore": "b.rs",
2649 "a.rs": "let a = 1;",
2650 "b.rs": "let b = 2;",
2651 },
2652 "other.rs": "let b = c;"
2653 }),
2654 )
2655 .await;
2656
2657 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2658 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2659 let (worktree, _) = project
2660 .update(cx, |project, cx| {
2661 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2662 })
2663 .await
2664 .unwrap();
2665 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2666
2667 let (worktree, _) = project
2668 .update(cx, |project, cx| {
2669 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2670 })
2671 .await
2672 .unwrap();
2673 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2674
2675 let server_id = LanguageServerId(0);
2676 lsp_store.update(cx, |lsp_store, cx| {
2677 lsp_store
2678 .update_diagnostics(
2679 server_id,
2680 lsp::PublishDiagnosticsParams {
2681 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2682 version: None,
2683 diagnostics: vec![lsp::Diagnostic {
2684 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2685 severity: Some(lsp::DiagnosticSeverity::ERROR),
2686 message: "unused variable 'b'".to_string(),
2687 ..Default::default()
2688 }],
2689 },
2690 None,
2691 DiagnosticSourceKind::Pushed,
2692 &[],
2693 cx,
2694 )
2695 .unwrap();
2696 lsp_store
2697 .update_diagnostics(
2698 server_id,
2699 lsp::PublishDiagnosticsParams {
2700 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2701 version: None,
2702 diagnostics: vec![lsp::Diagnostic {
2703 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2704 severity: Some(lsp::DiagnosticSeverity::ERROR),
2705 message: "unknown variable 'c'".to_string(),
2706 ..Default::default()
2707 }],
2708 },
2709 None,
2710 DiagnosticSourceKind::Pushed,
2711 &[],
2712 cx,
2713 )
2714 .unwrap();
2715 });
2716
2717 let main_ignored_buffer = project
2718 .update(cx, |project, cx| {
2719 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2720 })
2721 .await
2722 .unwrap();
2723 main_ignored_buffer.update(cx, |buffer, _| {
2724 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2725 assert_eq!(
2726 chunks
2727 .iter()
2728 .map(|(s, d)| (s.as_str(), *d))
2729 .collect::<Vec<_>>(),
2730 &[
2731 ("let ", None),
2732 ("b", Some(DiagnosticSeverity::ERROR)),
2733 (" = 2;", None),
2734 ],
2735 "Gigitnored buffers should still get in-buffer diagnostics",
2736 );
2737 });
2738 let other_buffer = project
2739 .update(cx, |project, cx| {
2740 project.open_buffer((other_worktree_id, rel_path("")), cx)
2741 })
2742 .await
2743 .unwrap();
2744 other_buffer.update(cx, |buffer, _| {
2745 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2746 assert_eq!(
2747 chunks
2748 .iter()
2749 .map(|(s, d)| (s.as_str(), *d))
2750 .collect::<Vec<_>>(),
2751 &[
2752 ("let b = ", None),
2753 ("c", Some(DiagnosticSeverity::ERROR)),
2754 (";", None),
2755 ],
2756 "Buffers from hidden projects should still get in-buffer diagnostics"
2757 );
2758 });
2759
2760 project.update(cx, |project, cx| {
2761 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2762 assert_eq!(
2763 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2764 vec![(
2765 ProjectPath {
2766 worktree_id: main_worktree_id,
2767 path: rel_path("b.rs").into(),
2768 },
2769 server_id,
2770 DiagnosticSummary {
2771 error_count: 1,
2772 warning_count: 0,
2773 }
2774 )]
2775 );
2776 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2777 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2778 });
2779}
2780
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Exercises the disk-based-diagnostics progress lifecycle: a server progress
    // token configured as `disk_based_diagnostics_progress_token` must produce
    // Started/Finished project events bracketing the DiagnosticsUpdated events,
    // and publishing identical empty diagnostics twice must emit only one update.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the disk-based token triggers the Started event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic while progress is running updates the path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending progress triggers the Finished event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is present on the buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second identical empty publish is a no-op: no further events.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2916
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restarting a language server while its disk-based diagnostics are still in
    // progress must not leave the project stuck in the "running diagnostics"
    // state: the replacement server's progress lifecycle alone determines it.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed and the new one (id 1) is added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
3018
3019#[gpui::test]
3020async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
3021 init_test(cx);
3022
3023 let fs = FakeFs::new(cx.executor());
3024 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
3025
3026 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3027
3028 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3029 language_registry.add(rust_lang());
3030 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3031
3032 let (buffer, _) = project
3033 .update(cx, |project, cx| {
3034 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3035 })
3036 .await
3037 .unwrap();
3038
3039 // Publish diagnostics
3040 let fake_server = fake_servers.next().await.unwrap();
3041 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3042 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3043 version: None,
3044 diagnostics: vec![lsp::Diagnostic {
3045 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3046 severity: Some(lsp::DiagnosticSeverity::ERROR),
3047 message: "the message".to_string(),
3048 ..Default::default()
3049 }],
3050 });
3051
3052 cx.executor().run_until_parked();
3053 buffer.update(cx, |buffer, _| {
3054 assert_eq!(
3055 buffer
3056 .snapshot()
3057 .diagnostics_in_range::<_, usize>(0..1, false)
3058 .map(|entry| entry.diagnostic.message.clone())
3059 .collect::<Vec<_>>(),
3060 ["the message".to_string()]
3061 );
3062 });
3063 project.update(cx, |project, cx| {
3064 assert_eq!(
3065 project.diagnostic_summary(false, cx),
3066 DiagnosticSummary {
3067 error_count: 1,
3068 warning_count: 0,
3069 }
3070 );
3071 });
3072
3073 project.update(cx, |project, cx| {
3074 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3075 });
3076
3077 // The diagnostics are cleared.
3078 cx.executor().run_until_parked();
3079 buffer.update(cx, |buffer, _| {
3080 assert_eq!(
3081 buffer
3082 .snapshot()
3083 .diagnostics_in_range::<_, usize>(0..1, false)
3084 .map(|entry| entry.diagnostic.message.clone())
3085 .collect::<Vec<_>>(),
3086 Vec::<String>::new(),
3087 );
3088 });
3089 project.update(cx, |project, cx| {
3090 assert_eq!(
3091 project.diagnostic_summary(false, cx),
3092 DiagnosticSummary {
3093 error_count: 0,
3094 warning_count: 0,
3095 }
3096 );
3097 });
3098}
3099
3100#[gpui::test]
3101async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
3102 init_test(cx);
3103
3104 let fs = FakeFs::new(cx.executor());
3105 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
3106
3107 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3108 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3109
3110 language_registry.add(rust_lang());
3111 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3112
3113 let (buffer, _handle) = project
3114 .update(cx, |project, cx| {
3115 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3116 })
3117 .await
3118 .unwrap();
3119
3120 // Before restarting the server, report diagnostics with an unknown buffer version.
3121 let fake_server = fake_servers.next().await.unwrap();
3122 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3123 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3124 version: Some(10000),
3125 diagnostics: Vec::new(),
3126 });
3127 cx.executor().run_until_parked();
3128 project.update(cx, |project, cx| {
3129 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3130 });
3131
3132 let mut fake_server = fake_servers.next().await.unwrap();
3133 let notification = fake_server
3134 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3135 .await
3136 .text_document;
3137 assert_eq!(notification.version, 0);
3138}
3139
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Cancelling language-server work for a buffer must send a
    // `window/workDoneProgress/cancel` notification for cancellable progress
    // tokens only — the non-cancellable token must not be cancelled.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First token: explicitly NOT cancellable.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second token: cancellable — this is the one a cancel request should target.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Only the cancellable token receives a cancellation notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3211
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Toggling the per-language `enable_language_server` setting must stop and
    // start exactly the affected server, leaving other languages' servers alone.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    // Each server is told about the buffer for its own language.
    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The fresh Rust server re-opens the Rust buffer…
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // …while the JavaScript server exits.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3329
// Diagnostics published by a language server are tagged with the document
// version they were computed against. This test edits the buffer between
// publishes and verifies that diagnostic ranges are translated through the
// intervening edits, that overlapping diagnostics are highlighted correctly,
// and that unsorted ("out-of-order") diagnostic lists are handled.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Diagnostics whose `source` is "disk" should be marked
            // `is_disk_based: true` in the assertions below.
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    // Every synchronized edit bumps the LSP document version.
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        // Deliberately stale: these positions refer to the pre-edit snapshot,
        // so the client must translate them through the "\n\n" insertion.
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Query rows 3..5 only: the 'A' diagnostic (now on row 2) is excluded.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        // Chunk rendering shows the diagnostics at their translated positions.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range query clips chunks at the query boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            // Overlaps the error above: warning range (0,9)..(0,12) contains
            // the error range (0,9)..(0,10).
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider warning sorts before the contained error; group ids
        // continue from the previous publish.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // Where the two overlap, the more severe (ERROR) wins; the warning
        // covers the remainder of its range.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        // The list is not sorted by position: the row-1 diagnostic is listed
        // before the row-0 one.
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Results come back ordered by buffer position, with ranges translated
        // through the latest edits: 'A' shifted by the inserted indentation and
        // "(x: usize)", 'BB' widened by the "xxx" insertion inside it.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
3621
3622#[gpui::test]
3623async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
3624 init_test(cx);
3625
3626 let text = concat!(
3627 "let one = ;\n", //
3628 "let two = \n",
3629 "let three = 3;\n",
3630 );
3631
3632 let fs = FakeFs::new(cx.executor());
3633 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3634
3635 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3636 let buffer = project
3637 .update(cx, |project, cx| {
3638 project.open_local_buffer(path!("/dir/a.rs"), cx)
3639 })
3640 .await
3641 .unwrap();
3642
3643 project.update(cx, |project, cx| {
3644 project.lsp_store().update(cx, |lsp_store, cx| {
3645 lsp_store
3646 .update_diagnostic_entries(
3647 LanguageServerId(0),
3648 PathBuf::from(path!("/dir/a.rs")),
3649 None,
3650 None,
3651 vec![
3652 DiagnosticEntry {
3653 range: Unclipped(PointUtf16::new(0, 10))
3654 ..Unclipped(PointUtf16::new(0, 10)),
3655 diagnostic: Diagnostic {
3656 severity: DiagnosticSeverity::ERROR,
3657 message: "syntax error 1".to_string(),
3658 source_kind: DiagnosticSourceKind::Pushed,
3659 ..Diagnostic::default()
3660 },
3661 },
3662 DiagnosticEntry {
3663 range: Unclipped(PointUtf16::new(1, 10))
3664 ..Unclipped(PointUtf16::new(1, 10)),
3665 diagnostic: Diagnostic {
3666 severity: DiagnosticSeverity::ERROR,
3667 message: "syntax error 2".to_string(),
3668 source_kind: DiagnosticSourceKind::Pushed,
3669 ..Diagnostic::default()
3670 },
3671 },
3672 ],
3673 cx,
3674 )
3675 .unwrap();
3676 })
3677 });
3678
3679 // An empty range is extended forward to include the following character.
3680 // At the end of a line, an empty range is extended backward to include
3681 // the preceding character.
3682 buffer.update(cx, |buffer, _| {
3683 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
3684 assert_eq!(
3685 chunks
3686 .iter()
3687 .map(|(s, d)| (s.as_str(), *d))
3688 .collect::<Vec<_>>(),
3689 &[
3690 ("let one = ", None),
3691 (";", Some(DiagnosticSeverity::ERROR)),
3692 ("\nlet two =", None),
3693 (" ", Some(DiagnosticSeverity::ERROR)),
3694 ("\nlet three = 3;\n", None)
3695 ]
3696 );
3697 });
3698}
3699
3700#[gpui::test]
3701async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3702 init_test(cx);
3703
3704 let fs = FakeFs::new(cx.executor());
3705 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3706 .await;
3707
3708 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3709 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3710
3711 lsp_store.update(cx, |lsp_store, cx| {
3712 lsp_store
3713 .update_diagnostic_entries(
3714 LanguageServerId(0),
3715 Path::new(path!("/dir/a.rs")).to_owned(),
3716 None,
3717 None,
3718 vec![DiagnosticEntry {
3719 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3720 diagnostic: Diagnostic {
3721 severity: DiagnosticSeverity::ERROR,
3722 is_primary: true,
3723 message: "syntax error a1".to_string(),
3724 source_kind: DiagnosticSourceKind::Pushed,
3725 ..Diagnostic::default()
3726 },
3727 }],
3728 cx,
3729 )
3730 .unwrap();
3731 lsp_store
3732 .update_diagnostic_entries(
3733 LanguageServerId(1),
3734 Path::new(path!("/dir/a.rs")).to_owned(),
3735 None,
3736 None,
3737 vec![DiagnosticEntry {
3738 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3739 diagnostic: Diagnostic {
3740 severity: DiagnosticSeverity::ERROR,
3741 is_primary: true,
3742 message: "syntax error b1".to_string(),
3743 source_kind: DiagnosticSourceKind::Pushed,
3744 ..Diagnostic::default()
3745 },
3746 }],
3747 cx,
3748 )
3749 .unwrap();
3750
3751 assert_eq!(
3752 lsp_store.diagnostic_summary(false, cx),
3753 DiagnosticSummary {
3754 error_count: 2,
3755 warning_count: 0,
3756 }
3757 );
3758 });
3759}
3760
// A server computes edits against the document version it last saw; if the
// user keeps typing before those edits arrive, the client must transform the
// edits through the newer buffer contents before applying them. This test
// pins that transformation by interleaving user comments with server edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    // Record the version the server will (pretend to) have computed edits for.
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Positions in these edits refer to the original (pre-edit) document, as
    // declared by passing `lsp_document_version` below.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits must preserve the user's interleaved
    // comments while still landing the server's changes (f10, f200, f4000).
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3915
// Some servers express a small change as a whole-file rewrite (delete
// everything, reinsert everything). `edits_from_lsp` should minimize such a
// diff down to just the lines that actually changed.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four-edit "rewrite" collapses to just two minimal edits: the
        // merged use statement and the removal of the now-duplicate line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        // Applying the minimized edits reproduces the intended final text.
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4026
// The LSP spec requires insertions at a position to precede any replacement
// starting at that same position, but some servers send them the other way
// around. Ensure the insertion still ends up before the replaced text.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replacement covering "Path" at the start of the document...
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    // ...followed by an insertion at the same start position.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The inserted import lands before the (unchanged) call site.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
4082
// Servers sometimes send edit lists that are unsorted, contain inverted
// ranges, or point past the end of the document. `edits_from_lsp` should
// normalize all of these into a sorted, minimal, in-bounds edit set.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0,4) precedes start (0,8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position is far past the last line of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal,
        // position-ordered pair of edits as the well-formed equivalent.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4189
4190fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4191 buffer: &Buffer,
4192 range: Range<T>,
4193) -> Vec<(String, Option<DiagnosticSeverity>)> {
4194 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4195 for chunk in buffer.snapshot().chunks(range, true) {
4196 if chunks
4197 .last()
4198 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4199 {
4200 chunks.last_mut().unwrap().0.push_str(chunk.text);
4201 } else {
4202 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4203 }
4204 }
4205 chunks
4206}
4207
// Go-to-definition into a file outside the project should reuse the existing
// language server (not spawn a new one) and open the target in a temporary,
// invisible worktree that is released once the definition location is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The server resolves the definition to a location in a.rs.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is held, a.rs is present as an *invisible*
        // worktree alongside the visible b.rs one.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the temporary worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: (absolute path, visibility) for every worktree in the project.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4308
// When a completion item carries an explicit `text_edit`, that edit's range
// and new text must be used verbatim, taking precedence over both
// `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request before installing the handler; the
    // handler is awaited below so the request is answered exactly once.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    // The authoritative edit: replace the trailing "fqn".
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion mirrors the text_edit, not insert_text/label.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4392
// Covers LSP 3.17 `CompletionList.itemDefaults.editRange`: when an individual
// completion item omits `textEdit`, the list-level default edit range must be
// applied, and the inserted text falls back from `textEditText` (Test 1) to
// the item's `label` (Test 2).
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The default edit range in both sub-tests covers the trailing "fqn".
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // The request is issued before the handler is installed; the handler's
        // `.next().await` below waits until the server has answered it.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        assert_eq!(completions.len(), 1);
        // `text_edit_text` wins over the label when present.
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        assert_eq!(completions.len(), 1);
        // With neither `text_edit` nor `text_edit_text`, the label is inserted;
        // `insert_text` is ignored when a default edit range is present.
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4530
// Completion items with no `textEdit` and no list-level default edit range:
// the replace range must be inferred from the text around the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // `insert_text` is used rather than the label, and the inferred range
    // covers the word being completed ("fqn").
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // The cursor sits just before the closing quote (`text.len() - 1`).
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // The label is inserted, replacing the "cmp" before the closing quote.
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4637
4638#[gpui::test]
4639async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
4640 init_test(cx);
4641
4642 let fs = FakeFs::new(cx.executor());
4643 fs.insert_tree(
4644 path!("/dir"),
4645 json!({
4646 "a.ts": "",
4647 }),
4648 )
4649 .await;
4650
4651 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4652
4653 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4654 language_registry.add(typescript_lang());
4655 let mut fake_language_servers = language_registry.register_fake_lsp(
4656 "TypeScript",
4657 FakeLspAdapter {
4658 capabilities: lsp::ServerCapabilities {
4659 completion_provider: Some(lsp::CompletionOptions {
4660 trigger_characters: Some(vec![":".to_string()]),
4661 ..Default::default()
4662 }),
4663 ..Default::default()
4664 },
4665 ..Default::default()
4666 },
4667 );
4668
4669 let (buffer, _handle) = project
4670 .update(cx, |p, cx| {
4671 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
4672 })
4673 .await
4674 .unwrap();
4675
4676 let fake_server = fake_language_servers.next().await.unwrap();
4677 cx.executor().run_until_parked();
4678
4679 let text = "let a = b.fqn";
4680 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
4681 let completions = project.update(cx, |project, cx| {
4682 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
4683 });
4684
4685 fake_server
4686 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
4687 Ok(Some(lsp::CompletionResponse::Array(vec![
4688 lsp::CompletionItem {
4689 label: "fullyQualifiedName?".into(),
4690 insert_text: Some("fully\rQualified\r\nName".into()),
4691 ..Default::default()
4692 },
4693 ])))
4694 })
4695 .next()
4696 .await;
4697 let completions = completions
4698 .await
4699 .unwrap()
4700 .into_iter()
4701 .flat_map(|response| response.completions)
4702 .collect::<Vec<_>>();
4703 assert_eq!(completions.len(), 1);
4704 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
4705}
4706
// A code action that carries a command (and no edits) must be resolved, its
// command executed on the server, and any `workspace/applyEdit` request the
// server issues while executing the command must be folded into the resulting
// project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // Actions must be resolved before applying, and the server
                // advertises the command it can execute.
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    // The `data` payload is round-tripped to the resolve
                    // handler below, which uses it to attach the command.
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request back to the client: insert "X"
                    // at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
4850
4851#[gpui::test]
4852async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
4853 init_test(cx);
4854 let fs = FakeFs::new(cx.background_executor.clone());
4855 let expected_contents = "content";
4856 fs.as_fake()
4857 .insert_tree(
4858 "/root",
4859 json!({
4860 "test.txt": expected_contents
4861 }),
4862 )
4863 .await;
4864
4865 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
4866
4867 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
4868 let worktree = project.worktrees(cx).next().unwrap();
4869 let entry_id = worktree
4870 .read(cx)
4871 .entry_for_path(rel_path("test.txt"))
4872 .unwrap()
4873 .id;
4874 (worktree, entry_id)
4875 });
4876 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4877 let _result = project
4878 .update(cx, |project, cx| {
4879 project.rename_entry(
4880 entry_id,
4881 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
4882 cx,
4883 )
4884 })
4885 .await
4886 .unwrap();
4887 worktree.read_with(cx, |worktree, _| {
4888 assert!(
4889 worktree.entry_for_path(rel_path("test.txt")).is_none(),
4890 "Old file should have been removed"
4891 );
4892 assert!(
4893 worktree
4894 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4895 .is_some(),
4896 "Whole directory hierarchy and the new file should have been created"
4897 );
4898 });
4899 assert_eq!(
4900 worktree
4901 .update(cx, |worktree, cx| {
4902 worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
4903 })
4904 .await
4905 .unwrap()
4906 .text,
4907 expected_contents,
4908 "Moved file's contents should be preserved"
4909 );
4910
4911 let entry_id = worktree.read_with(cx, |worktree, _| {
4912 worktree
4913 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4914 .unwrap()
4915 .id
4916 });
4917
4918 let _result = project
4919 .update(cx, |project, cx| {
4920 project.rename_entry(
4921 entry_id,
4922 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
4923 cx,
4924 )
4925 })
4926 .await
4927 .unwrap();
4928 worktree.read_with(cx, |worktree, _| {
4929 assert!(
4930 worktree.entry_for_path(rel_path("test.txt")).is_none(),
4931 "First file should not reappear"
4932 );
4933 assert!(
4934 worktree
4935 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4936 .is_none(),
4937 "Old file should have been removed"
4938 );
4939 assert!(
4940 worktree
4941 .entry_for_path(rel_path("dir1/dir2/test.txt"))
4942 .is_some(),
4943 "No error should have occurred after moving into existing directory"
4944 );
4945 });
4946 assert_eq!(
4947 worktree
4948 .update(cx, |worktree, cx| {
4949 worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
4950 })
4951 .await
4952 .unwrap()
4953 .text,
4954 expected_contents,
4955 "Moved file's contents should be preserved"
4956 );
4957}
4958
4959#[gpui::test(iterations = 10)]
4960async fn test_save_file(cx: &mut gpui::TestAppContext) {
4961 init_test(cx);
4962
4963 let fs = FakeFs::new(cx.executor());
4964 fs.insert_tree(
4965 path!("/dir"),
4966 json!({
4967 "file1": "the old contents",
4968 }),
4969 )
4970 .await;
4971
4972 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4973 let buffer = project
4974 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4975 .await
4976 .unwrap();
4977 buffer.update(cx, |buffer, cx| {
4978 assert_eq!(buffer.text(), "the old contents");
4979 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4980 });
4981
4982 project
4983 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4984 .await
4985 .unwrap();
4986
4987 let new_text = fs
4988 .load(Path::new(path!("/dir/file1")))
4989 .await
4990 .unwrap()
4991 .replace("\r\n", "\n");
4992 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4993}
4994
// Regression test for issue #24349: saving an untitled buffer under a path
// with a recognized extension must start the matching language server and
// notify it that the file is open.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer has no path yet, so no language server applies to it.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving under a `.rs` path gives the buffer a file and a language.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
5074
5075#[gpui::test(iterations = 30)]
5076async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
5077 init_test(cx);
5078
5079 let fs = FakeFs::new(cx.executor());
5080 fs.insert_tree(
5081 path!("/dir"),
5082 json!({
5083 "file1": "the original contents",
5084 }),
5085 )
5086 .await;
5087
5088 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5089 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5090 let buffer = project
5091 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5092 .await
5093 .unwrap();
5094
5095 // Change the buffer's file on disk, and then wait for the file change
5096 // to be detected by the worktree, so that the buffer starts reloading.
5097 fs.save(
5098 path!("/dir/file1").as_ref(),
5099 &"the first contents".into(),
5100 Default::default(),
5101 )
5102 .await
5103 .unwrap();
5104 worktree.next_event(cx).await;
5105
5106 // Change the buffer's file again. Depending on the random seed, the
5107 // previous file change may still be in progress.
5108 fs.save(
5109 path!("/dir/file1").as_ref(),
5110 &"the second contents".into(),
5111 Default::default(),
5112 )
5113 .await
5114 .unwrap();
5115 worktree.next_event(cx).await;
5116
5117 cx.executor().run_until_parked();
5118 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5119 buffer.read_with(cx, |buffer, _| {
5120 assert_eq!(buffer.text(), on_disk_text);
5121 assert!(!buffer.is_dirty(), "buffer should not be dirty");
5122 assert!(!buffer.has_conflict(), "buffer should not be dirty");
5123 });
5124}
5125
5126#[gpui::test(iterations = 30)]
5127async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
5128 init_test(cx);
5129
5130 let fs = FakeFs::new(cx.executor());
5131 fs.insert_tree(
5132 path!("/dir"),
5133 json!({
5134 "file1": "the original contents",
5135 }),
5136 )
5137 .await;
5138
5139 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5140 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5141 let buffer = project
5142 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5143 .await
5144 .unwrap();
5145
5146 // Change the buffer's file on disk, and then wait for the file change
5147 // to be detected by the worktree, so that the buffer starts reloading.
5148 fs.save(
5149 path!("/dir/file1").as_ref(),
5150 &"the first contents".into(),
5151 Default::default(),
5152 )
5153 .await
5154 .unwrap();
5155 worktree.next_event(cx).await;
5156
5157 cx.executor()
5158 .spawn(cx.executor().simulate_random_delay())
5159 .await;
5160
5161 // Perform a noop edit, causing the buffer's version to increase.
5162 buffer.update(cx, |buffer, cx| {
5163 buffer.edit([(0..0, " ")], None, cx);
5164 buffer.undo(cx);
5165 });
5166
5167 cx.executor().run_until_parked();
5168 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5169 buffer.read_with(cx, |buffer, _| {
5170 let buffer_text = buffer.text();
5171 if buffer_text == on_disk_text {
5172 assert!(
5173 !buffer.is_dirty() && !buffer.has_conflict(),
5174 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5175 );
5176 }
5177 // If the file change occurred while the buffer was processing the first
5178 // change, the buffer will be in a conflicting state.
5179 else {
5180 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5181 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5182 }
5183 });
5184}
5185
5186#[gpui::test]
5187async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5188 init_test(cx);
5189
5190 let fs = FakeFs::new(cx.executor());
5191 fs.insert_tree(
5192 path!("/dir"),
5193 json!({
5194 "file1": "the old contents",
5195 }),
5196 )
5197 .await;
5198
5199 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5200 let buffer = project
5201 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5202 .await
5203 .unwrap();
5204 buffer.update(cx, |buffer, cx| {
5205 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5206 });
5207
5208 project
5209 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5210 .await
5211 .unwrap();
5212
5213 let new_text = fs
5214 .load(Path::new(path!("/dir/file1")))
5215 .await
5216 .unwrap()
5217 .replace("\r\n", "\n");
5218 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5219}
5220
5221#[gpui::test]
5222async fn test_save_as(cx: &mut gpui::TestAppContext) {
5223 init_test(cx);
5224
5225 let fs = FakeFs::new(cx.executor());
5226 fs.insert_tree("/dir", json!({})).await;
5227
5228 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5229
5230 let languages = project.update(cx, |project, _| project.languages().clone());
5231 languages.add(rust_lang());
5232
5233 let buffer = project.update(cx, |project, cx| {
5234 project.create_local_buffer("", None, false, cx)
5235 });
5236 buffer.update(cx, |buffer, cx| {
5237 buffer.edit([(0..0, "abc")], None, cx);
5238 assert!(buffer.is_dirty());
5239 assert!(!buffer.has_conflict());
5240 assert_eq!(buffer.language().unwrap().name(), "Plain Text");
5241 });
5242 project
5243 .update(cx, |project, cx| {
5244 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5245 let path = ProjectPath {
5246 worktree_id,
5247 path: rel_path("file1.rs").into(),
5248 };
5249 project.save_buffer_as(buffer.clone(), path, cx)
5250 })
5251 .await
5252 .unwrap();
5253 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
5254
5255 cx.executor().run_until_parked();
5256 buffer.update(cx, |buffer, cx| {
5257 assert_eq!(
5258 buffer.file().unwrap().full_path(cx),
5259 Path::new("dir/file1.rs")
5260 );
5261 assert!(!buffer.is_dirty());
5262 assert!(!buffer.has_conflict());
5263 assert_eq!(buffer.language().unwrap().name(), "Rust");
5264 });
5265
5266 let opened_buffer = project
5267 .update(cx, |project, cx| {
5268 project.open_local_buffer("/dir/file1.rs", cx)
5269 })
5270 .await
5271 .unwrap();
5272 assert_eq!(opened_buffer, buffer);
5273}
5274
5275#[gpui::test]
5276async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
5277 init_test(cx);
5278
5279 let fs = FakeFs::new(cx.executor());
5280 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5281
5282 fs.insert_tree(
5283 path!("/dir"),
5284 json!({
5285 "data_a.txt": "data about a"
5286 }),
5287 )
5288 .await;
5289
5290 let buffer = project
5291 .update(cx, |project, cx| {
5292 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5293 })
5294 .await
5295 .unwrap();
5296
5297 buffer.update(cx, |buffer, cx| {
5298 buffer.edit([(11..12, "b")], None, cx);
5299 });
5300
5301 // Save buffer's contents as a new file and confirm that the buffer's now
5302 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
5303 // file associated with the buffer has now been updated to `data_b.txt`
5304 project
5305 .update(cx, |project, cx| {
5306 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5307 let new_path = ProjectPath {
5308 worktree_id,
5309 path: rel_path("data_b.txt").into(),
5310 };
5311
5312 project.save_buffer_as(buffer.clone(), new_path, cx)
5313 })
5314 .await
5315 .unwrap();
5316
5317 buffer.update(cx, |buffer, cx| {
5318 assert_eq!(
5319 buffer.file().unwrap().full_path(cx),
5320 Path::new("dir/data_b.txt")
5321 )
5322 });
5323
5324 // Open the original `data_a.txt` file, confirming that its contents are
5325 // unchanged and the resulting buffer's associated file is `data_a.txt`.
5326 let original_buffer = project
5327 .update(cx, |project, cx| {
5328 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5329 })
5330 .await
5331 .unwrap();
5332
5333 original_buffer.update(cx, |buffer, cx| {
5334 assert_eq!(buffer.text(), "data about a");
5335 assert_eq!(
5336 buffer.file().unwrap().full_path(cx),
5337 Path::new("dir/data_a.txt")
5338 )
5339 });
5340}
5341
// End-to-end check that local worktree rescans (after real fs renames and
// deletions) keep entry ids and open buffers consistent, and that the same
// changes can be replayed onto a remote worktree replica via observed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real file-system watching may block; allow the executor to park.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp-tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can later be
    // replayed onto the remote replica.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids stay stable across renames, including renames of ancestors.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths; the deleted
        // file's buffer keeps its last-known path.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5509
5510#[cfg(target_os = "linux")]
5511#[gpui::test(retries = 5)]
5512async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
5513 init_test(cx);
5514 cx.executor().allow_parking();
5515
5516 let dir = TempTree::new(json!({}));
5517 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
5518 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5519
5520 tree.flush_fs_events(cx).await;
5521
5522 let repro_dir = dir.path().join("repro");
5523 std::fs::create_dir(&repro_dir).unwrap();
5524 tree.flush_fs_events(cx).await;
5525
5526 cx.update(|cx| {
5527 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5528 });
5529
5530 std::fs::remove_dir_all(&repro_dir).unwrap();
5531 tree.flush_fs_events(cx).await;
5532
5533 cx.update(|cx| {
5534 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
5535 });
5536
5537 std::fs::create_dir(&repro_dir).unwrap();
5538 tree.flush_fs_events(cx).await;
5539
5540 cx.update(|cx| {
5541 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5542 });
5543
5544 std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
5545 tree.flush_fs_events(cx).await;
5546
5547 cx.update(|cx| {
5548 assert!(
5549 tree.read(cx)
5550 .entry_for_path(rel_path("repro/repro-marker"))
5551 .is_some()
5552 );
5553 });
5554}
5555
5556#[gpui::test(iterations = 10)]
5557async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
5558 init_test(cx);
5559
5560 let fs = FakeFs::new(cx.executor());
5561 fs.insert_tree(
5562 path!("/dir"),
5563 json!({
5564 "a": {
5565 "file1": "",
5566 }
5567 }),
5568 )
5569 .await;
5570
5571 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
5572 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5573 let tree_id = tree.update(cx, |tree, _| tree.id());
5574
5575 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5576 project.update(cx, |project, cx| {
5577 let tree = project.worktrees(cx).next().unwrap();
5578 tree.read(cx)
5579 .entry_for_path(rel_path(path))
5580 .unwrap_or_else(|| panic!("no entry for path {}", path))
5581 .id
5582 })
5583 };
5584
5585 let dir_id = id_for_path("a", cx);
5586 let file_id = id_for_path("a/file1", cx);
5587 let buffer = project
5588 .update(cx, |p, cx| {
5589 p.open_buffer((tree_id, rel_path("a/file1")), cx)
5590 })
5591 .await
5592 .unwrap();
5593 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5594
5595 project
5596 .update(cx, |project, cx| {
5597 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
5598 })
5599 .unwrap()
5600 .await
5601 .into_included()
5602 .unwrap();
5603 cx.executor().run_until_parked();
5604
5605 assert_eq!(id_for_path("b", cx), dir_id);
5606 assert_eq!(id_for_path("b/file1", cx), file_id);
5607 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5608}
5609
5610#[gpui::test]
5611async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5612 init_test(cx);
5613
5614 let fs = FakeFs::new(cx.executor());
5615 fs.insert_tree(
5616 "/dir",
5617 json!({
5618 "a.txt": "a-contents",
5619 "b.txt": "b-contents",
5620 }),
5621 )
5622 .await;
5623
5624 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5625
5626 // Spawn multiple tasks to open paths, repeating some paths.
5627 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5628 (
5629 p.open_local_buffer("/dir/a.txt", cx),
5630 p.open_local_buffer("/dir/b.txt", cx),
5631 p.open_local_buffer("/dir/a.txt", cx),
5632 )
5633 });
5634
5635 let buffer_a_1 = buffer_a_1.await.unwrap();
5636 let buffer_a_2 = buffer_a_2.await.unwrap();
5637 let buffer_b = buffer_b.await.unwrap();
5638 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5639 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5640
5641 // There is only one buffer per path.
5642 let buffer_a_id = buffer_a_1.entity_id();
5643 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5644
5645 // Open the same path again while it is still open.
5646 drop(buffer_a_1);
5647 let buffer_a_3 = project
5648 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5649 .await
5650 .unwrap();
5651
5652 // There's still only one buffer per path.
5653 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5654}
5655
// Verifies dirty-state tracking and the buffer events emitted across edits,
// saves, on-disk deletion, and restoring a buffer to its saved contents.
// Operation events are filtered out so only state-change events are asserted.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collects every non-Operation event emitted by buffer1.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited { is_local: true },
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first edit after a save flips dirty state, so the second
        // edit produces an Edited event with no DirtyChanged.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited { is_local: true },
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited { is_local: true },
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    // Returning to the saved content emits DirtyChanged (dirty -> clean).
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited { is_local: true },
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited { is_local: true },
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited { is_local: true },
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
5837
// Verifies that a buffer which conflicted with an on-disk change (because it
// was dirty at the time) reloads from disk once an undo makes it clean again.
#[gpui::test]
async fn test_dirty_buffer_reloads_after_undo(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file.txt": "version 1",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.text(), "version 1");
        assert!(!buffer.is_dirty());
    });

    // User makes an edit, making the buffer dirty.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "user edit: ")], None, cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.is_dirty());
        assert_eq!(buffer.text(), "user edit: version 1");
    });

    // External tool writes new content while buffer is dirty.
    // file_updated() updates the File but suppresses ReloadNeeded.
    fs.save(
        path!("/dir/file.txt").as_ref(),
        &"version 2 from external tool".into(),
        Default::default(),
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    // The dirty buffer keeps the user's text and is flagged as conflicting.
    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.has_conflict());
        assert_eq!(buffer.text(), "user edit: version 1");
    });

    // User undoes their edit. Buffer becomes clean, but disk has different
    // content. did_edit() detects the dirty->clean transition and checks if
    // disk changed while dirty. Since mtime differs from saved_mtime, it
    // emits ReloadNeeded.
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });
    cx.executor().run_until_parked();

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.text(),
            "version 2 from external tool",
            "buffer should reload from disk after undo makes it clean"
        );
        assert!(!buffer.is_dirty());
    });
}
5906
// Verifies how a buffer responds to its backing file changing on disk:
// a clean buffer reloads (rebasing anchors via a diff of old vs. new
// contents), while a dirty buffer keeps its text and reports a conflict.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // "ˇ" markers record offsets so we can verify anchors survive the reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Place an anchor at each marked offset.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Each anchor must land at the corresponding marker in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
5989
5990#[gpui::test]
5991async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
5992 init_test(cx);
5993
5994 let fs = FakeFs::new(cx.executor());
5995 fs.insert_tree(
5996 path!("/dir"),
5997 json!({
5998 "file1": "a\nb\nc\n",
5999 "file2": "one\r\ntwo\r\nthree\r\n",
6000 }),
6001 )
6002 .await;
6003
6004 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6005 let buffer1 = project
6006 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
6007 .await
6008 .unwrap();
6009 let buffer2 = project
6010 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
6011 .await
6012 .unwrap();
6013
6014 buffer1.update(cx, |buffer, _| {
6015 assert_eq!(buffer.text(), "a\nb\nc\n");
6016 assert_eq!(buffer.line_ending(), LineEnding::Unix);
6017 });
6018 buffer2.update(cx, |buffer, _| {
6019 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
6020 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6021 });
6022
6023 // Change a file's line endings on disk from unix to windows. The buffer's
6024 // state updates correctly.
6025 fs.save(
6026 path!("/dir/file1").as_ref(),
6027 &"aaa\nb\nc\n".into(),
6028 LineEnding::Windows,
6029 )
6030 .await
6031 .unwrap();
6032 cx.executor().run_until_parked();
6033 buffer1.update(cx, |buffer, _| {
6034 assert_eq!(buffer.text(), "aaa\nb\nc\n");
6035 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6036 });
6037
6038 // Save a file with windows line endings. The file is written correctly.
6039 buffer2.update(cx, |buffer, cx| {
6040 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
6041 });
6042 project
6043 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
6044 .await
6045 .unwrap();
6046 assert_eq!(
6047 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
6048 "one\r\ntwo\r\nthree\r\nfour\r\n",
6049 );
6050}
6051
// Verifies that pushed LSP diagnostics whose messages cross-reference each
// other via `related_information` are grouped: hints that restate a primary
// diagnostic's related info get that primary's group_id, primaries are flagged
// `is_primary`, and `diagnostic_group` returns each group in range order.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Five diagnostics: a WARNING ("error 1") with one hint, and an ERROR
    // ("error 2") with two hints. Hints point back at their primary via
    // related_information ("original diagnostic").
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in range order: the two groups interleave by position.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: "error 2" plus its two hints, ordered by range.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6311
// Verifies file-operation notifications around a worktree entry rename:
// the server receives workspace/willRenameFiles before the rename, its
// returned WorkspaceEdit is captured, and workspace/didRenameFiles follows
// after the rename completes — gated by the server's registered file filters.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters: *.rs files and any folder qualify for rename notifications.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename; it blocks on the willRenameFiles round-trip below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will answer willRenameFiles with; the client is
    // expected to apply it.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, didRenameFiles arrives with the same paths.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6448
// Verifies LSP symbol rename: prepare_rename resolves the renameable range,
// and perform_rename applies a multi-file WorkspaceEdit, returning a
// transaction covering every edited buffer.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // prepare_rename at offset 7 (inside "ONE"); the server answers with the
    // symbol's full range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename to "THREE"; the server returns edits spanning both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction covers both edited buffers: one.rs (the one we opened)
    // and two.rs (opened implicitly to apply the edit).
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6589
// Verifies project-wide text search: matches are found across on-disk files,
// and unsaved in-memory buffer edits are searched too.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Whole-word search for "TWO" matches only the declaration and the
    // qualified use, not substrings.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so it now references two::TWO.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The search picks up the unsaved buffer contents for four.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
6666
6667#[gpui::test]
6668async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
6669 init_test(cx);
6670
6671 let search_query = "file";
6672
6673 let fs = FakeFs::new(cx.executor());
6674 fs.insert_tree(
6675 path!("/dir"),
6676 json!({
6677 "one.rs": r#"// Rust file one"#,
6678 "one.ts": r#"// TypeScript file one"#,
6679 "two.rs": r#"// Rust file two"#,
6680 "two.ts": r#"// TypeScript file two"#,
6681 }),
6682 )
6683 .await;
6684 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6685
6686 assert!(
6687 search(
6688 &project,
6689 SearchQuery::text(
6690 search_query,
6691 false,
6692 true,
6693 false,
6694 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6695 Default::default(),
6696 false,
6697 None
6698 )
6699 .unwrap(),
6700 cx
6701 )
6702 .await
6703 .unwrap()
6704 .is_empty(),
6705 "If no inclusions match, no files should be returned"
6706 );
6707
6708 assert_eq!(
6709 search(
6710 &project,
6711 SearchQuery::text(
6712 search_query,
6713 false,
6714 true,
6715 false,
6716 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6717 Default::default(),
6718 false,
6719 None
6720 )
6721 .unwrap(),
6722 cx
6723 )
6724 .await
6725 .unwrap(),
6726 HashMap::from_iter([
6727 (path!("dir/one.rs").to_string(), vec![8..12]),
6728 (path!("dir/two.rs").to_string(), vec![8..12]),
6729 ]),
6730 "Rust only search should give only Rust files"
6731 );
6732
6733 assert_eq!(
6734 search(
6735 &project,
6736 SearchQuery::text(
6737 search_query,
6738 false,
6739 true,
6740 false,
6741 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6742 .unwrap(),
6743 Default::default(),
6744 false,
6745 None,
6746 )
6747 .unwrap(),
6748 cx
6749 )
6750 .await
6751 .unwrap(),
6752 HashMap::from_iter([
6753 (path!("dir/one.ts").to_string(), vec![14..18]),
6754 (path!("dir/two.ts").to_string(), vec![14..18]),
6755 ]),
6756 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
6757 );
6758
6759 assert_eq!(
6760 search(
6761 &project,
6762 SearchQuery::text(
6763 search_query,
6764 false,
6765 true,
6766 false,
6767 PathMatcher::new(
6768 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6769 PathStyle::local()
6770 )
6771 .unwrap(),
6772 Default::default(),
6773 false,
6774 None,
6775 )
6776 .unwrap(),
6777 cx
6778 )
6779 .await
6780 .unwrap(),
6781 HashMap::from_iter([
6782 (path!("dir/two.ts").to_string(), vec![14..18]),
6783 (path!("dir/one.rs").to_string(), vec![8..12]),
6784 (path!("dir/one.ts").to_string(), vec![14..18]),
6785 (path!("dir/two.rs").to_string(), vec![8..12]),
6786 ]),
6787 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
6788 );
6789}
6790
6791#[gpui::test]
6792async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
6793 init_test(cx);
6794
6795 let search_query = "file";
6796
6797 let fs = FakeFs::new(cx.executor());
6798 fs.insert_tree(
6799 path!("/dir"),
6800 json!({
6801 "one.rs": r#"// Rust file one"#,
6802 "one.ts": r#"// TypeScript file one"#,
6803 "two.rs": r#"// Rust file two"#,
6804 "two.ts": r#"// TypeScript file two"#,
6805 }),
6806 )
6807 .await;
6808 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6809
6810 assert_eq!(
6811 search(
6812 &project,
6813 SearchQuery::text(
6814 search_query,
6815 false,
6816 true,
6817 false,
6818 Default::default(),
6819 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6820 false,
6821 None,
6822 )
6823 .unwrap(),
6824 cx
6825 )
6826 .await
6827 .unwrap(),
6828 HashMap::from_iter([
6829 (path!("dir/one.rs").to_string(), vec![8..12]),
6830 (path!("dir/one.ts").to_string(), vec![14..18]),
6831 (path!("dir/two.rs").to_string(), vec![8..12]),
6832 (path!("dir/two.ts").to_string(), vec![14..18]),
6833 ]),
6834 "If no exclusions match, all files should be returned"
6835 );
6836
6837 assert_eq!(
6838 search(
6839 &project,
6840 SearchQuery::text(
6841 search_query,
6842 false,
6843 true,
6844 false,
6845 Default::default(),
6846 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6847 false,
6848 None,
6849 )
6850 .unwrap(),
6851 cx
6852 )
6853 .await
6854 .unwrap(),
6855 HashMap::from_iter([
6856 (path!("dir/one.ts").to_string(), vec![14..18]),
6857 (path!("dir/two.ts").to_string(), vec![14..18]),
6858 ]),
6859 "Rust exclusion search should give only TypeScript files"
6860 );
6861
6862 assert_eq!(
6863 search(
6864 &project,
6865 SearchQuery::text(
6866 search_query,
6867 false,
6868 true,
6869 false,
6870 Default::default(),
6871 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6872 .unwrap(),
6873 false,
6874 None,
6875 )
6876 .unwrap(),
6877 cx
6878 )
6879 .await
6880 .unwrap(),
6881 HashMap::from_iter([
6882 (path!("dir/one.rs").to_string(), vec![8..12]),
6883 (path!("dir/two.rs").to_string(), vec![8..12]),
6884 ]),
6885 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6886 );
6887
6888 assert!(
6889 search(
6890 &project,
6891 SearchQuery::text(
6892 search_query,
6893 false,
6894 true,
6895 false,
6896 Default::default(),
6897 PathMatcher::new(
6898 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6899 PathStyle::local(),
6900 )
6901 .unwrap(),
6902 false,
6903 None,
6904 )
6905 .unwrap(),
6906 cx
6907 )
6908 .await
6909 .unwrap()
6910 .is_empty(),
6911 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6912 );
6913}
6914
6915#[gpui::test]
6916async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
6917 init_test(cx);
6918
6919 let search_query = "file";
6920
6921 let fs = FakeFs::new(cx.executor());
6922 fs.insert_tree(
6923 path!("/dir"),
6924 json!({
6925 "one.rs": r#"// Rust file one"#,
6926 "one.ts": r#"// TypeScript file one"#,
6927 "two.rs": r#"// Rust file two"#,
6928 "two.ts": r#"// TypeScript file two"#,
6929 }),
6930 )
6931 .await;
6932
6933 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6934 let path_style = PathStyle::local();
6935 let _buffer = project.update(cx, |project, cx| {
6936 project.create_local_buffer("file", None, false, cx)
6937 });
6938
6939 assert_eq!(
6940 search(
6941 &project,
6942 SearchQuery::text(
6943 search_query,
6944 false,
6945 true,
6946 false,
6947 Default::default(),
6948 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
6949 false,
6950 None,
6951 )
6952 .unwrap(),
6953 cx
6954 )
6955 .await
6956 .unwrap(),
6957 HashMap::from_iter([
6958 (path!("dir/one.rs").to_string(), vec![8..12]),
6959 (path!("dir/one.ts").to_string(), vec![14..18]),
6960 (path!("dir/two.rs").to_string(), vec![8..12]),
6961 (path!("dir/two.ts").to_string(), vec![14..18]),
6962 ]),
6963 "If no exclusions match, all files should be returned"
6964 );
6965
6966 assert_eq!(
6967 search(
6968 &project,
6969 SearchQuery::text(
6970 search_query,
6971 false,
6972 true,
6973 false,
6974 Default::default(),
6975 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
6976 false,
6977 None,
6978 )
6979 .unwrap(),
6980 cx
6981 )
6982 .await
6983 .unwrap(),
6984 HashMap::from_iter([
6985 (path!("dir/one.ts").to_string(), vec![14..18]),
6986 (path!("dir/two.ts").to_string(), vec![14..18]),
6987 ]),
6988 "Rust exclusion search should give only TypeScript files"
6989 );
6990
6991 assert_eq!(
6992 search(
6993 &project,
6994 SearchQuery::text(
6995 search_query,
6996 false,
6997 true,
6998 false,
6999 Default::default(),
7000 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
7001 false,
7002 None,
7003 )
7004 .unwrap(),
7005 cx
7006 )
7007 .await
7008 .unwrap(),
7009 HashMap::from_iter([
7010 (path!("dir/one.rs").to_string(), vec![8..12]),
7011 (path!("dir/two.rs").to_string(), vec![8..12]),
7012 ]),
7013 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7014 );
7015
7016 assert!(
7017 search(
7018 &project,
7019 SearchQuery::text(
7020 search_query,
7021 false,
7022 true,
7023 false,
7024 Default::default(),
7025 PathMatcher::new(
7026 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7027 PathStyle::local(),
7028 )
7029 .unwrap(),
7030 false,
7031 None,
7032 )
7033 .unwrap(),
7034 cx
7035 )
7036 .await
7037 .unwrap()
7038 .is_empty(),
7039 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7040 );
7041}
7042
7043#[gpui::test]
7044async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
7045 init_test(cx);
7046
7047 let search_query = "file";
7048
7049 let fs = FakeFs::new(cx.executor());
7050 fs.insert_tree(
7051 path!("/dir"),
7052 json!({
7053 "one.rs": r#"// Rust file one"#,
7054 "one.ts": r#"// TypeScript file one"#,
7055 "two.rs": r#"// Rust file two"#,
7056 "two.ts": r#"// TypeScript file two"#,
7057 }),
7058 )
7059 .await;
7060 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7061 assert!(
7062 search(
7063 &project,
7064 SearchQuery::text(
7065 search_query,
7066 false,
7067 true,
7068 false,
7069 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7070 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7071 false,
7072 None,
7073 )
7074 .unwrap(),
7075 cx
7076 )
7077 .await
7078 .unwrap()
7079 .is_empty(),
7080 "If both no exclusions and inclusions match, exclusions should win and return nothing"
7081 );
7082
7083 assert!(
7084 search(
7085 &project,
7086 SearchQuery::text(
7087 search_query,
7088 false,
7089 true,
7090 false,
7091 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7092 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7093 false,
7094 None,
7095 )
7096 .unwrap(),
7097 cx
7098 )
7099 .await
7100 .unwrap()
7101 .is_empty(),
7102 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
7103 );
7104
7105 assert!(
7106 search(
7107 &project,
7108 SearchQuery::text(
7109 search_query,
7110 false,
7111 true,
7112 false,
7113 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7114 .unwrap(),
7115 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7116 .unwrap(),
7117 false,
7118 None,
7119 )
7120 .unwrap(),
7121 cx
7122 )
7123 .await
7124 .unwrap()
7125 .is_empty(),
7126 "Non-matching inclusions and exclusions should not change that."
7127 );
7128
7129 assert_eq!(
7130 search(
7131 &project,
7132 SearchQuery::text(
7133 search_query,
7134 false,
7135 true,
7136 false,
7137 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7138 .unwrap(),
7139 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
7140 .unwrap(),
7141 false,
7142 None,
7143 )
7144 .unwrap(),
7145 cx
7146 )
7147 .await
7148 .unwrap(),
7149 HashMap::from_iter([
7150 (path!("dir/one.ts").to_string(), vec![14..18]),
7151 (path!("dir/two.ts").to_string(), vec![14..18]),
7152 ]),
7153 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
7154 );
7155}
7156
7157#[gpui::test]
7158async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
7159 init_test(cx);
7160
7161 let fs = FakeFs::new(cx.executor());
7162 fs.insert_tree(
7163 path!("/worktree-a"),
7164 json!({
7165 "haystack.rs": r#"// NEEDLE"#,
7166 "haystack.ts": r#"// NEEDLE"#,
7167 }),
7168 )
7169 .await;
7170 fs.insert_tree(
7171 path!("/worktree-b"),
7172 json!({
7173 "haystack.rs": r#"// NEEDLE"#,
7174 "haystack.ts": r#"// NEEDLE"#,
7175 }),
7176 )
7177 .await;
7178
7179 let path_style = PathStyle::local();
7180 let project = Project::test(
7181 fs.clone(),
7182 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
7183 cx,
7184 )
7185 .await;
7186
7187 assert_eq!(
7188 search(
7189 &project,
7190 SearchQuery::text(
7191 "NEEDLE",
7192 false,
7193 true,
7194 false,
7195 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
7196 Default::default(),
7197 true,
7198 None,
7199 )
7200 .unwrap(),
7201 cx
7202 )
7203 .await
7204 .unwrap(),
7205 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
7206 "should only return results from included worktree"
7207 );
7208 assert_eq!(
7209 search(
7210 &project,
7211 SearchQuery::text(
7212 "NEEDLE",
7213 false,
7214 true,
7215 false,
7216 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
7217 Default::default(),
7218 true,
7219 None,
7220 )
7221 .unwrap(),
7222 cx
7223 )
7224 .await
7225 .unwrap(),
7226 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
7227 "should only return results from included worktree"
7228 );
7229
7230 assert_eq!(
7231 search(
7232 &project,
7233 SearchQuery::text(
7234 "NEEDLE",
7235 false,
7236 true,
7237 false,
7238 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
7239 Default::default(),
7240 false,
7241 None,
7242 )
7243 .unwrap(),
7244 cx
7245 )
7246 .await
7247 .unwrap(),
7248 HashMap::from_iter([
7249 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
7250 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
7251 ]),
7252 "should return results from both worktrees"
7253 );
7254}
7255
7256#[gpui::test]
7257async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
7258 init_test(cx);
7259
7260 let fs = FakeFs::new(cx.background_executor.clone());
7261 fs.insert_tree(
7262 path!("/dir"),
7263 json!({
7264 ".git": {},
7265 ".gitignore": "**/target\n/node_modules\n",
7266 "target": {
7267 "index.txt": "index_key:index_value"
7268 },
7269 "node_modules": {
7270 "eslint": {
7271 "index.ts": "const eslint_key = 'eslint value'",
7272 "package.json": r#"{ "some_key": "some value" }"#,
7273 },
7274 "prettier": {
7275 "index.ts": "const prettier_key = 'prettier value'",
7276 "package.json": r#"{ "other_key": "other value" }"#,
7277 },
7278 },
7279 "package.json": r#"{ "main_key": "main value" }"#,
7280 }),
7281 )
7282 .await;
7283 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7284
7285 let query = "key";
7286 assert_eq!(
7287 search(
7288 &project,
7289 SearchQuery::text(
7290 query,
7291 false,
7292 false,
7293 false,
7294 Default::default(),
7295 Default::default(),
7296 false,
7297 None,
7298 )
7299 .unwrap(),
7300 cx
7301 )
7302 .await
7303 .unwrap(),
7304 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
7305 "Only one non-ignored file should have the query"
7306 );
7307
7308 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7309 let path_style = PathStyle::local();
7310 assert_eq!(
7311 search(
7312 &project,
7313 SearchQuery::text(
7314 query,
7315 false,
7316 false,
7317 true,
7318 Default::default(),
7319 Default::default(),
7320 false,
7321 None,
7322 )
7323 .unwrap(),
7324 cx
7325 )
7326 .await
7327 .unwrap(),
7328 HashMap::from_iter([
7329 (path!("dir/package.json").to_string(), vec![8..11]),
7330 (path!("dir/target/index.txt").to_string(), vec![6..9]),
7331 (
7332 path!("dir/node_modules/prettier/package.json").to_string(),
7333 vec![9..12]
7334 ),
7335 (
7336 path!("dir/node_modules/prettier/index.ts").to_string(),
7337 vec![15..18]
7338 ),
7339 (
7340 path!("dir/node_modules/eslint/index.ts").to_string(),
7341 vec![13..16]
7342 ),
7343 (
7344 path!("dir/node_modules/eslint/package.json").to_string(),
7345 vec![8..11]
7346 ),
7347 ]),
7348 "Unrestricted search with ignored directories should find every file with the query"
7349 );
7350
7351 let files_to_include =
7352 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
7353 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
7354 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7355 assert_eq!(
7356 search(
7357 &project,
7358 SearchQuery::text(
7359 query,
7360 false,
7361 false,
7362 true,
7363 files_to_include,
7364 files_to_exclude,
7365 false,
7366 None,
7367 )
7368 .unwrap(),
7369 cx
7370 )
7371 .await
7372 .unwrap(),
7373 HashMap::from_iter([(
7374 path!("dir/node_modules/prettier/package.json").to_string(),
7375 vec![9..12]
7376 )]),
7377 "With search including ignored prettier directory and excluding TS files, only one file should be found"
7378 );
7379}
7380
7381#[gpui::test]
7382async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
7383 init_test(cx);
7384
7385 let fs = FakeFs::new(cx.executor());
7386 fs.insert_tree(
7387 path!("/dir"),
7388 json!({
7389 "one.rs": "// ПРИВЕТ? привет!",
7390 "two.rs": "// ПРИВЕТ.",
7391 "three.rs": "// привет",
7392 }),
7393 )
7394 .await;
7395 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7396 let unicode_case_sensitive_query = SearchQuery::text(
7397 "привет",
7398 false,
7399 true,
7400 false,
7401 Default::default(),
7402 Default::default(),
7403 false,
7404 None,
7405 );
7406 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
7407 assert_eq!(
7408 search(&project, unicode_case_sensitive_query.unwrap(), cx)
7409 .await
7410 .unwrap(),
7411 HashMap::from_iter([
7412 (path!("dir/one.rs").to_string(), vec![17..29]),
7413 (path!("dir/three.rs").to_string(), vec![3..15]),
7414 ])
7415 );
7416
7417 let unicode_case_insensitive_query = SearchQuery::text(
7418 "привет",
7419 false,
7420 false,
7421 false,
7422 Default::default(),
7423 Default::default(),
7424 false,
7425 None,
7426 );
7427 assert_matches!(
7428 unicode_case_insensitive_query,
7429 Ok(SearchQuery::Regex { .. })
7430 );
7431 assert_eq!(
7432 search(&project, unicode_case_insensitive_query.unwrap(), cx)
7433 .await
7434 .unwrap(),
7435 HashMap::from_iter([
7436 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
7437 (path!("dir/two.rs").to_string(), vec![3..15]),
7438 (path!("dir/three.rs").to_string(), vec![3..15]),
7439 ])
7440 );
7441
7442 assert_eq!(
7443 search(
7444 &project,
7445 SearchQuery::text(
7446 "привет.",
7447 false,
7448 false,
7449 false,
7450 Default::default(),
7451 Default::default(),
7452 false,
7453 None,
7454 )
7455 .unwrap(),
7456 cx
7457 )
7458 .await
7459 .unwrap(),
7460 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
7461 );
7462}
7463
7464#[gpui::test]
7465async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7466 init_test(cx);
7467
7468 let fs = FakeFs::new(cx.executor());
7469 fs.insert_tree(
7470 "/one/two",
7471 json!({
7472 "three": {
7473 "a.txt": "",
7474 "four": {}
7475 },
7476 "c.rs": ""
7477 }),
7478 )
7479 .await;
7480
7481 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7482 project
7483 .update(cx, |project, cx| {
7484 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7485 project.create_entry((id, rel_path("b..")), true, cx)
7486 })
7487 .await
7488 .unwrap()
7489 .into_included()
7490 .unwrap();
7491
7492 assert_eq!(
7493 fs.paths(true),
7494 vec![
7495 PathBuf::from(path!("/")),
7496 PathBuf::from(path!("/one")),
7497 PathBuf::from(path!("/one/two")),
7498 PathBuf::from(path!("/one/two/c.rs")),
7499 PathBuf::from(path!("/one/two/three")),
7500 PathBuf::from(path!("/one/two/three/a.txt")),
7501 PathBuf::from(path!("/one/two/three/b..")),
7502 PathBuf::from(path!("/one/two/three/four")),
7503 ]
7504 );
7505}
7506
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Four fake language servers are registered for the same "tsx" language:
    // two that answer hover requests with content, one that answers `None`,
    // and one that advertises no hover capability at all. The project-level
    // hover must query only the capable servers and merge non-empty responses.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Register the fake adapters; all but the last one advertise hover support.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts the registered language servers for "tsx".
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Collect one initialized server per adapter and install hover handlers
    // according to each server's role described above.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two answer with hover content embedding the server's name.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Hover-capable, but returns no content.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Must never be queried, since it declared no hover capability.
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Start the hover, then wait until every capable server has actually
    // received a request before inspecting the merged result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two content-producing servers contribute hover entries.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
7661
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // A hover response consisting solely of empty / whitespace-only parts
    // must be filtered down to no hover entries at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // One fake TypeScript server advertising hover support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Respond with three hover parts that are all effectively empty:
    // an empty string, a single space, and only newlines.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Ensure the fake server actually received the request before asserting.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
7735
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // When the caller restricts code actions to specific kinds, actions of
    // other kinds returned by the server must be filtered out of the result.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // One fake TypeScript server advertising code-action support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server replies with two actions of different kinds; only one of
    // those kinds is requested below.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only SOURCE_ORGANIZE_IMPORTS actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Ensure the fake server actually received the request before asserting.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the organize-imports action survives the kind filter.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
7814
#[gpui::test]
async fn test_code_actions_without_requested_kinds_do_not_send_only_filter(
    cx: &mut gpui::TestAppContext,
) {
    // When no kind filter is requested, the outgoing LSP request must leave
    // `context.only` unset, even though the server registered explicit
    // `code_action_kinds` in its capabilities.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The fake server declares a concrete list of supported code-action kinds.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        code_action_kinds: Some(vec![
                            CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
                            "source.doc".into(),
                        ]),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The handler asserts on the incoming request's `context.only` field and
    // replies with an action whose kind is outside the advertised list.
    let mut request_handled = fake_server.set_request_handler::<
        lsp::request::CodeActionRequest,
        _,
        _,
    >(move |params, _| async move {
        assert_eq!(
            params.context.only, None,
            "Code action requests without explicit kind filters should not send `context.only`"
        );
        Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
            lsp::CodeAction {
                title: "Add test".to_string(),
                kind: Some("source.addTest".into()),
                ..lsp::CodeAction::default()
            },
        )]))
    });

    // Request code actions with no kind filter (`None`).
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
    });

    // Ensure the fake server actually received the request before asserting.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // With no filter, the server's action is passed through unchanged.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some("source.addTest".into())
    );
}
7900
7901#[gpui::test]
7902async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
7903 init_test(cx);
7904
7905 let fs = FakeFs::new(cx.executor());
7906 fs.insert_tree(
7907 path!("/dir"),
7908 json!({
7909 "a.tsx": "a",
7910 }),
7911 )
7912 .await;
7913
7914 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7915
7916 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7917 language_registry.add(tsx_lang());
7918 let language_server_names = [
7919 "TypeScriptServer",
7920 "TailwindServer",
7921 "ESLintServer",
7922 "NoActionsCapabilitiesServer",
7923 ];
7924
7925 let mut language_server_rxs = [
7926 language_registry.register_fake_lsp(
7927 "tsx",
7928 FakeLspAdapter {
7929 name: language_server_names[0],
7930 capabilities: lsp::ServerCapabilities {
7931 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7932 ..lsp::ServerCapabilities::default()
7933 },
7934 ..FakeLspAdapter::default()
7935 },
7936 ),
7937 language_registry.register_fake_lsp(
7938 "tsx",
7939 FakeLspAdapter {
7940 name: language_server_names[1],
7941 capabilities: lsp::ServerCapabilities {
7942 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7943 ..lsp::ServerCapabilities::default()
7944 },
7945 ..FakeLspAdapter::default()
7946 },
7947 ),
7948 language_registry.register_fake_lsp(
7949 "tsx",
7950 FakeLspAdapter {
7951 name: language_server_names[2],
7952 capabilities: lsp::ServerCapabilities {
7953 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7954 ..lsp::ServerCapabilities::default()
7955 },
7956 ..FakeLspAdapter::default()
7957 },
7958 ),
7959 language_registry.register_fake_lsp(
7960 "tsx",
7961 FakeLspAdapter {
7962 name: language_server_names[3],
7963 capabilities: lsp::ServerCapabilities {
7964 code_action_provider: None,
7965 ..lsp::ServerCapabilities::default()
7966 },
7967 ..FakeLspAdapter::default()
7968 },
7969 ),
7970 ];
7971
7972 let (buffer, _handle) = project
7973 .update(cx, |p, cx| {
7974 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7975 })
7976 .await
7977 .unwrap();
7978 cx.executor().run_until_parked();
7979
7980 let mut servers_with_actions_requests = HashMap::default();
7981 for i in 0..language_server_names.len() {
7982 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
7983 panic!(
7984 "Failed to get language server #{i} with name {}",
7985 &language_server_names[i]
7986 )
7987 });
7988 let new_server_name = new_server.server.name();
7989
7990 assert!(
7991 !servers_with_actions_requests.contains_key(&new_server_name),
7992 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7993 );
7994 match new_server_name.0.as_ref() {
7995 "TailwindServer" | "TypeScriptServer" => {
7996 servers_with_actions_requests.insert(
7997 new_server_name.clone(),
7998 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7999 move |_, _| {
8000 let name = new_server_name.clone();
8001 async move {
8002 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8003 lsp::CodeAction {
8004 title: format!("{name} code action"),
8005 ..lsp::CodeAction::default()
8006 },
8007 )]))
8008 }
8009 },
8010 ),
8011 );
8012 }
8013 "ESLintServer" => {
8014 servers_with_actions_requests.insert(
8015 new_server_name,
8016 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8017 |_, _| async move { Ok(None) },
8018 ),
8019 );
8020 }
8021 "NoActionsCapabilitiesServer" => {
8022 let _never_handled = new_server
8023 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
8024 panic!(
8025 "Should not call for code actions server with no corresponding capabilities"
8026 )
8027 });
8028 }
8029 unexpected => panic!("Unexpected server name: {unexpected}"),
8030 }
8031 }
8032
8033 let code_actions_task = project.update(cx, |project, cx| {
8034 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8035 });
8036
8037 // cx.run_until_parked();
8038 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
8039 |mut code_actions_request| async move {
8040 code_actions_request
8041 .next()
8042 .await
8043 .expect("All code actions requests should have been triggered")
8044 },
8045 ))
8046 .await;
8047 assert_eq!(
8048 vec!["TailwindServer code action", "TypeScriptServer code action"],
8049 code_actions_task
8050 .await
8051 .unwrap()
8052 .unwrap()
8053 .into_iter()
8054 .map(|code_action| code_action.lsp_action.title().to_owned())
8055 .sorted()
8056 .collect::<Vec<_>>(),
8057 "Should receive code actions responses from all related servers with hover capabilities"
8058 );
8059}
8060
8061#[gpui::test]
8062async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
8063 init_test(cx);
8064
8065 let fs = FakeFs::new(cx.executor());
8066 fs.insert_tree(
8067 "/dir",
8068 json!({
8069 "a.rs": "let a = 1;",
8070 "b.rs": "let b = 2;",
8071 "c.rs": "let c = 2;",
8072 }),
8073 )
8074 .await;
8075
8076 let project = Project::test(
8077 fs,
8078 [
8079 "/dir/a.rs".as_ref(),
8080 "/dir/b.rs".as_ref(),
8081 "/dir/c.rs".as_ref(),
8082 ],
8083 cx,
8084 )
8085 .await;
8086
8087 // check the initial state and get the worktrees
8088 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
8089 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8090 assert_eq!(worktrees.len(), 3);
8091
8092 let worktree_a = worktrees[0].read(cx);
8093 let worktree_b = worktrees[1].read(cx);
8094 let worktree_c = worktrees[2].read(cx);
8095
8096 // check they start in the right order
8097 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
8098 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
8099 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
8100
8101 (
8102 worktrees[0].clone(),
8103 worktrees[1].clone(),
8104 worktrees[2].clone(),
8105 )
8106 });
8107
8108 // move first worktree to after the second
8109 // [a, b, c] -> [b, a, c]
8110 project
8111 .update(cx, |project, cx| {
8112 let first = worktree_a.read(cx);
8113 let second = worktree_b.read(cx);
8114 project.move_worktree(first.id(), second.id(), cx)
8115 })
8116 .expect("moving first after second");
8117
8118 // check the state after moving
8119 project.update(cx, |project, cx| {
8120 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8121 assert_eq!(worktrees.len(), 3);
8122
8123 let first = worktrees[0].read(cx);
8124 let second = worktrees[1].read(cx);
8125 let third = worktrees[2].read(cx);
8126
8127 // check they are now in the right order
8128 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8129 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
8130 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8131 });
8132
8133 // move the second worktree to before the first
8134 // [b, a, c] -> [a, b, c]
8135 project
8136 .update(cx, |project, cx| {
8137 let second = worktree_a.read(cx);
8138 let first = worktree_b.read(cx);
8139 project.move_worktree(first.id(), second.id(), cx)
8140 })
8141 .expect("moving second before first");
8142
8143 // check the state after moving
8144 project.update(cx, |project, cx| {
8145 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8146 assert_eq!(worktrees.len(), 3);
8147
8148 let first = worktrees[0].read(cx);
8149 let second = worktrees[1].read(cx);
8150 let third = worktrees[2].read(cx);
8151
8152 // check they are now in the right order
8153 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8154 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8155 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8156 });
8157
8158 // move the second worktree to after the third
8159 // [a, b, c] -> [a, c, b]
8160 project
8161 .update(cx, |project, cx| {
8162 let second = worktree_b.read(cx);
8163 let third = worktree_c.read(cx);
8164 project.move_worktree(second.id(), third.id(), cx)
8165 })
8166 .expect("moving second after third");
8167
8168 // check the state after moving
8169 project.update(cx, |project, cx| {
8170 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8171 assert_eq!(worktrees.len(), 3);
8172
8173 let first = worktrees[0].read(cx);
8174 let second = worktrees[1].read(cx);
8175 let third = worktrees[2].read(cx);
8176
8177 // check they are now in the right order
8178 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8179 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8180 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
8181 });
8182
8183 // move the third worktree to before the second
8184 // [a, c, b] -> [a, b, c]
8185 project
8186 .update(cx, |project, cx| {
8187 let third = worktree_c.read(cx);
8188 let second = worktree_b.read(cx);
8189 project.move_worktree(third.id(), second.id(), cx)
8190 })
8191 .expect("moving third before second");
8192
8193 // check the state after moving
8194 project.update(cx, |project, cx| {
8195 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8196 assert_eq!(worktrees.len(), 3);
8197
8198 let first = worktrees[0].read(cx);
8199 let second = worktrees[1].read(cx);
8200 let third = worktrees[2].read(cx);
8201
8202 // check they are now in the right order
8203 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8204 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8205 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8206 });
8207
8208 // move the first worktree to after the third
8209 // [a, b, c] -> [b, c, a]
8210 project
8211 .update(cx, |project, cx| {
8212 let first = worktree_a.read(cx);
8213 let third = worktree_c.read(cx);
8214 project.move_worktree(first.id(), third.id(), cx)
8215 })
8216 .expect("moving first after third");
8217
8218 // check the state after moving
8219 project.update(cx, |project, cx| {
8220 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8221 assert_eq!(worktrees.len(), 3);
8222
8223 let first = worktrees[0].read(cx);
8224 let second = worktrees[1].read(cx);
8225 let third = worktrees[2].read(cx);
8226
8227 // check they are now in the right order
8228 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8229 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8230 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
8231 });
8232
8233 // move the third worktree to before the first
8234 // [b, c, a] -> [a, b, c]
8235 project
8236 .update(cx, |project, cx| {
8237 let third = worktree_a.read(cx);
8238 let first = worktree_b.read(cx);
8239 project.move_worktree(third.id(), first.id(), cx)
8240 })
8241 .expect("moving third before first");
8242
8243 // check the state after moving
8244 project.update(cx, |project, cx| {
8245 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8246 assert_eq!(worktrees.len(), 3);
8247
8248 let first = worktrees[0].read(cx);
8249 let second = worktrees[1].read(cx);
8250 let third = worktrees[2].read(cx);
8251
8252 // check they are now in the right order
8253 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8254 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8255 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8256 });
8257}
8258
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an unstaged diff (open buffer vs. the git index) reports
    // the correct hunks, and that it updates when the index contents change.
    init_test(cx);

    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // Seed the fake repository's index so the buffer differs from it.
    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The initial diff: one added line and one modified line relative to the index.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Rewrite the index so it now contains the comment but not the println call.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    // After the index change is picked up, only the println line remains unstaged.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff
                .snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
8352
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an uncommitted diff (open buffer vs. HEAD) reports correct
    // hunks with per-hunk staged/unstaged secondary status, reacts to HEAD
    // changes, and handles files that were deleted in the working copy.
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index both also contain `deletion.rs`, which is absent from
    // the working tree, i.e. an uncommitted deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language for highlighting.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment line is only in the buffer (unstaged => HasSecondaryHunk),
    // while the println change is also in the index (fully staged => none).
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is not yet in the index, so the hunk has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // With the deletion staged, the secondary hunk disappears.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8536
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    // Verifies the full staging lifecycle for individual hunks: optimistic
    // "pending" status while the index write is in flight, transition to
    // staged on success, rollback on index-write failure, and the
    // `BufferDiffEvent`s emitted at each step.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback is announced with a diff-changed event covering the file.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8886
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    // Verifies that staging stays consistent when filesystem events are
    // delivered late: hunks staged while earlier index-write events are still
    // pending must not be clobbered once those events finally arrive.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
9080
// Randomized stress test: repeatedly stage and unstage random hunks while
// interleaving random executor delays, then verify that the diff's secondary
// (index) hunk statuses settle to the expected final values once all pending
// git jobs have completed.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via the
    // `OPERATIONS` environment variable for longer soak runs.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and the index agree; the working copy modifies every fifth line of
    // a 30-line file, producing six separate hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    // One hunk per modified line (rows 0, 5, 10, 15, 20, 25).
    assert_eq!(hunks.len(), 6);

    // Toggle random hunks, mirroring the expected pending status into our
    // local `hunks` copy so it can be compared with the diff's view later.
    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times so in-flight git jobs race with
        // subsequent stage/unstage requests.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once quiesced, every pending status resolves: a pending removal leaves
    // no secondary hunk and a pending addition re-creates one.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
9200
// Verifies that a project whose worktree root is a single file (rather than a
// directory) still resolves the containing repository and reports the
// uncommitted diff between HEAD and the working copy.
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index agree, so the only hunk comes from the working copy.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs", committed_contents.clone())],
    );

    // Note: the project root is the file itself, not a directory.
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &uncommitted_diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                DiffHunkStatus {
                    kind: DiffHunkStatusKind::Modified,
                    // Unstaged: the index still matches HEAD.
                    secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
                },
            )],
        );
    });
}
9274
// TODO: Should we test this on Windows also?
// Regression test: staging a hunk must not clobber the executable bit that is
// already recorded in the index (mode 100755 must not become 100644).
// Uses the real filesystem and a real git repository, hence `allow_parking`.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` as an executable file, then modify its contents so there
    // is a hunk available to stage.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk of the modified executable file.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // Inspect the staged diff with the real git binary: a mode change would
    // show up as "new mode 100644".
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check the index entry itself still records mode 100755.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
9359
// Verifies mapping of project paths to (repository, repo-relative path):
// files outside any repository map to `None`, files inside a nested
// repository map to the innermost repository, and deleting a `.git`
// directory removes the association.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // `dir1` is a repository containing a nested dependency repository
    // (`dir1/deps/dep1`); `c.txt` lives outside both.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project path, expected (work directory, repo-relative path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing dir1's `.git` should drop the repository association for its
    // files.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
9449
// Verifies the special-casing of the home directory as a repository root: a
// worktree nested *inside* the home directory does not pick up a repository
// rooted at home, but opening the home directory itself as the worktree does.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Case 1: worktree rooted at ~/project — home's repository is ignored.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Case 2: worktree rooted at ~ itself — the repository is recognized.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
9507
// Exercises `Repository::cached_status` against a real git repository through
// a sequence of working-copy edits, commits, and deletions, checking the
// cached status entries (including diff stats) after each step.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; it should now appear.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Commit the modifications and the deletion, then delete two files from
    // the working copy.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
                diff_stat: Some(DiffStat {
                    added: 0,
                    deleted: 1,
                }),
            }]
        );
    });
}
9663
// Verifies status postprocessing: a file deleted from the index but present
// in HEAD and the working copy gets a combined `DA` (deleted-in-index,
// added-in-worktree) status, and entries for nested repositories are
// excluded from the outer repository's statuses.
// Currently disabled via `#[ignore]`.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
                diff_stat: None,
            }]
        )
    });
}
9728
9729#[track_caller]
9730/// We merge lhs into rhs.
9731fn merge_pending_ops_snapshots(
9732 source: Vec<pending_op::PendingOps>,
9733 mut target: Vec<pending_op::PendingOps>,
9734) -> Vec<pending_op::PendingOps> {
9735 for s_ops in source {
9736 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9737 if ops.repo_path == s_ops.repo_path {
9738 Some(idx)
9739 } else {
9740 None
9741 }
9742 }) {
9743 let t_ops = &mut target[idx];
9744 for s_op in s_ops.ops {
9745 if let Some(op_idx) = t_ops
9746 .ops
9747 .iter()
9748 .zip(0..)
9749 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9750 {
9751 let t_op = &mut t_ops.ops[op_idx];
9752 match (s_op.job_status, t_op.job_status) {
9753 (pending_op::JobStatus::Running, _) => {}
9754 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9755 (s_st, t_st) if s_st == t_st => {}
9756 _ => unreachable!(),
9757 }
9758 } else {
9759 t_ops.ops.push(s_op);
9760 }
9761 }
9762 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9763 } else {
9764 target.push(s_ops);
9765 }
9766 }
9767 target
9768}
9769
// Exercises per-path pending-op bookkeeping while staging and unstaging an
// untracked file several times: each request must surface a `Running` op
// that settles to `Finished`, and the accumulated event stream must contain
// every op in order.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot so
    // the full op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Op ids are expected to increase monotonically starting at 1.
    let mut id = 1u16;

    // Stage or unstage `path`, asserting the op is Running while in flight
    // and Finished once the task completes.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate staging and unstaging, ending staged.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event history must contain all five ops, in id order.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // Final cached status reflects the last operation: file staged (added).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
9934
// Verifies coalescing of redundant staging requests: when the same path is
// staged twice in quick succession, the first op is marked `Skipped` and
// only the second one actually runs to `Finished`.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot so
    // the full op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First request is detached (never awaited) ...
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // ... while a second identical request is issued and awaited, with a
    // timeout guarding against a hang.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // The superseded first op is Skipped; the second op Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file ends up staged exactly once.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10044
// Verifies pending-op bookkeeping for bulk operations: after staging one
// file, `stage_all` covers the rest, and `unstage_all` reverts both files.
// Note that redundant bulk requests don't enqueue extra ops per path — each
// path ends up with exactly one Staged and one Unstaged op.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot so
    // the full op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt individually, then stage everything, then unstage
    // everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: staged by the explicit request (stage_all found nothing to do),
    // then unstaged by unstage_all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: staged by stage_all, then unstaged by unstage_all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
            ]
        );
    });
}
10175
// Verifies that a project rooted at a subfolder of a repository still
// discovers the repository at its true root and reports statuses for paths
// relative to that root, updating when the repository's status changes.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Paths are repo-relative (rooted at my-repo), not worktree-relative.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // The worktree root is two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clearing the repository's status should be reflected after a rescan.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
10255
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Compiled out entirely via `#[cfg(any())]` (an always-false cfg) until the
// flakiness is resolved. Intent: a conflicted cherry-pick should surface the
// conflicted path in `repository.merge_conflicts`, and resolving the
// cherry-pick (commit + removing CHERRY_PICK_HEAD) should clear it.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create conflicting edits to a.txt on two branches, then cherry-pick
    // one onto the other to produce a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The conflicted path should now be tracked by the repository.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick is resolved, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
10338
// Verifies that editing a .gitignore is picked up: a file that stops being
// ignored starts reporting git status (here: staged/added), and a file that
// becomes ignored stops reporting status.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index agree: .gitignore and a.xml are committed and unmodified;
    // b.txt is not tracked (and is ignored by "*.txt").
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Now a.xml is ignored, while b.txt shows as staged (Added) and not ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
10406
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
//
// Verifies that renaming a repository's work directory on disk updates
// `work_directory_abs_path` while preserving per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem + real git, so parking is required.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit `a`, leave `b` untracked, then modify `a` in the working copy.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: `a` is modified in the worktree, `b` is untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should follow the rename and keep the same statuses.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10488
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
//
// End-to-end check of git status tracking against a real repository:
// untracked files, working-copy modifications, commits, reset/remove/stash,
// gitignore updates, and renames of directories containing untracked files.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Paths relative to the repository work directory.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so both are untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // a.txt and b.txt are committed and clean: no status entries.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    // This write lands under the ignored `target/` directory.
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files and extend the ignore rules to also cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // `renamed_dir_name` is reassigned below once the directory is renamed on disk.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create an untracked file inside a new nested directory.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the untracked file's status should follow.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10713
// Verifies that FS churn inside a gitignored directory (`target/`) does not
// produce repository status updates, while the directly-watched ignored dir
// (`target/debug`, loaded because a file inside it was opened) still gets
// add/remove entry events for its immediate children.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record every repository update and worktree-entry update so we can assert
    // on exactly which events each phase of the test produced.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel path is test-harness noise, not a real entry.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Opening a file inside the ignored dir forces its ancestors to be loaded.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![RepositoryEvent::StatusesChanged,],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Simulate build-tool churn: create a deps dir, drop a temp file in it,
    // then remove the whole dir — all inside the ignored `target/` tree.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
10872
10873// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
10874// to different timings/ordering of events.
10875#[ignore]
10876#[gpui::test]
10877async fn test_odd_events_for_ignored_dirs(
10878 executor: BackgroundExecutor,
10879 cx: &mut gpui::TestAppContext,
10880) {
10881 init_test(cx);
10882 let fs = FakeFs::new(executor);
10883 fs.insert_tree(
10884 path!("/root"),
10885 json!({
10886 ".git": {},
10887 ".gitignore": "**/target/",
10888 "src": {
10889 "main.rs": "fn main() {}",
10890 },
10891 "target": {
10892 "debug": {
10893 "foo.txt": "foo",
10894 "deps": {}
10895 }
10896 }
10897 }),
10898 )
10899 .await;
10900 fs.set_head_and_index_for_repo(
10901 path!("/root/.git").as_ref(),
10902 &[
10903 (".gitignore", "**/target/".into()),
10904 ("src/main.rs", "fn main() {}".into()),
10905 ],
10906 );
10907
10908 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10909 let repository_updates = Arc::new(Mutex::new(Vec::new()));
10910 let project_events = Arc::new(Mutex::new(Vec::new()));
10911 project.update(cx, |project, cx| {
10912 let repository_updates = repository_updates.clone();
10913 cx.subscribe(project.git_store(), move |_, _, e, _| {
10914 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
10915 repository_updates.lock().push(e.clone());
10916 }
10917 })
10918 .detach();
10919 let project_events = project_events.clone();
10920 cx.subscribe_self(move |_, e, _| {
10921 if let Event::WorktreeUpdatedEntries(_, updates) = e {
10922 project_events.lock().extend(
10923 updates
10924 .iter()
10925 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
10926 .filter(|(path, _)| path != "fs-event-sentinel"),
10927 );
10928 }
10929 })
10930 .detach();
10931 });
10932
10933 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10934 tree.update(cx, |tree, cx| {
10935 tree.load_file(rel_path("target/debug/foo.txt"), cx)
10936 })
10937 .await
10938 .unwrap();
10939 tree.flush_fs_events(cx).await;
10940 project
10941 .update(cx, |project, cx| project.git_scans_complete(cx))
10942 .await;
10943 cx.run_until_parked();
10944 tree.update(cx, |tree, _| {
10945 assert_eq!(
10946 tree.entries(true, 0)
10947 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10948 .collect::<Vec<_>>(),
10949 vec![
10950 (rel_path(""), false),
10951 (rel_path(".gitignore"), false),
10952 (rel_path("src"), false),
10953 (rel_path("src/main.rs"), false),
10954 (rel_path("target"), true),
10955 (rel_path("target/debug"), true),
10956 (rel_path("target/debug/deps"), true),
10957 (rel_path("target/debug/foo.txt"), true),
10958 ]
10959 );
10960 });
10961
10962 assert_eq!(
10963 repository_updates.lock().drain(..).collect::<Vec<_>>(),
10964 vec![
10965 RepositoryEvent::BranchChanged,
10966 RepositoryEvent::StatusesChanged,
10967 RepositoryEvent::StatusesChanged,
10968 ],
10969 "Initial worktree scan should produce a repo update event"
10970 );
10971 assert_eq!(
10972 project_events.lock().drain(..).collect::<Vec<_>>(),
10973 vec![
10974 ("target".to_string(), PathChange::Loaded),
10975 ("target/debug".to_string(), PathChange::Loaded),
10976 ("target/debug/deps".to_string(), PathChange::Loaded),
10977 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
10978 ],
10979 "All non-ignored entries and all opened firs should be getting a project event",
10980 );
10981
10982 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
10983 // This may happen multiple times during a single flycheck, but once is enough for testing.
10984 fs.emit_fs_event("/root/target/debug/deps", None);
10985 tree.flush_fs_events(cx).await;
10986 project
10987 .update(cx, |project, cx| project.git_scans_complete(cx))
10988 .await;
10989 cx.executor().run_until_parked();
10990
10991 assert_eq!(
10992 repository_updates
10993 .lock()
10994 .iter()
10995 .cloned()
10996 .collect::<Vec<_>>(),
10997 Vec::new(),
10998 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
10999 );
11000 assert_eq!(
11001 project_events.lock().as_slice(),
11002 Vec::new(),
11003 "No further project events should happen, as only ignored dirs received FS events",
11004 );
11005}
11006
// Verifies that only repositories belonging to visible worktrees are reported:
// adding an invisible (visible: false) worktree for a file that lives inside an
// outer repository must not add that outer repository to the list.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // Nested repos: /root/dir1 is a repo, and /root/dir1/dep1 is its own repo.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Only the inner repo (dep1) is opened as a visible worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Create an invisible worktree for a file inside the outer repo (dir1).
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list should be unchanged: dir1's repo stays invisible.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
11068
// Verifies git/ignore state after a rescan: files ignored by an ancestor
// .gitignore (outside the worktree root) stay unignored inside the worktree,
// files in an ignored dir report no status, and newly staged files show Added.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file_scan_exclusions so that entries like `.git` are scanned.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // The outer .gitignore lives above the worktree root ("/root/tree").
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored dir's entries to be loaded so they can be asserted on.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new tracked file and stage it, plus new ancestor-ignored and
    // ignored-dir files that should remain status-less.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
11209
// Verifies that linked git worktrees (gitdir file + `worktrees/<name>/commondir`)
// and submodules (gitdir file pointing into `.git/modules/...`) are each
// detected as separate repositories, and that changes to their git state are
// picked up and reflected in file statuses.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                // A linked worktree's .git is a file pointing at the main repo.
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    // A submodule's .git is a file pointing into .git/modules.
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be discovered: main, linked worktree, submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, not the main one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    // Wait until the repository has processed pending updates.
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
11365
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    // Two worktrees rooted at sibling subdirectories of the same git
    // repository should be deduplicated into a single repository entry in
    // the project's git store.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open the two children (not the repo root) as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    // Wait for fs events and git scans to settle before inspecting state.
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository, rooted at the shared parent directory.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
11412
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    // When a buffer is saved under a new path, its unstaged and uncommitted
    // diffs should be rebased onto the *new* path's index/HEAD contents.
    init_test(cx);

    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    // Give both files distinct HEAD and index contents so we can tell which
    // base text the diffs are using at each step.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Dirty the buffer so the rename below is a save-as with modified content.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // A freshly opened uncommitted diff should likewise compare against the
    // new path's HEAD content.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11526
11527async fn search(
11528 project: &Entity<Project>,
11529 query: SearchQuery,
11530 cx: &mut gpui::TestAppContext,
11531) -> Result<HashMap<String, Vec<Range<usize>>>> {
11532 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11533 let mut results = HashMap::default();
11534 while let Ok(search_result) = search_rx.rx.recv().await {
11535 match search_result {
11536 SearchResult::Buffer { buffer, ranges } => {
11537 results.entry(buffer).or_insert(ranges);
11538 }
11539 SearchResult::LimitReached => {}
11540 }
11541 }
11542 Ok(results
11543 .into_iter()
11544 .map(|(buffer, ranges)| {
11545 buffer.update(cx, |buffer, cx| {
11546 let path = buffer
11547 .file()
11548 .unwrap()
11549 .full_path(cx)
11550 .to_string_lossy()
11551 .to_string();
11552 let ranges = ranges
11553 .into_iter()
11554 .map(|range| range.to_offset(buffer))
11555 .collect::<Vec<_>>();
11556 (path, ranges)
11557 })
11558 })
11559 .collect())
11560}
11561
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    // Reloading a buffer with a different encoding should be undoable and
    // redoable, and should never mark the buffer dirty (contents come from
    // disk, not from edits).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    // Initial state: UTF-8, "Hi", clean.
    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    // Undo restores both the text and the original encoding.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    // Redo reapplies the UTF-16LE interpretation.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11625
/// Shared setup for the tests in this file: initializes logging, installs a
/// test `SettingsStore` global, and initializes the release channel so that
/// `Project::test` and settings-dependent code paths can run.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        // Settings must be registered before release_channel::init reads them.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(semver::Version::new(0, 0, 0), cx);
    });
}
11635
11636fn json_lang() -> Arc<Language> {
11637 Arc::new(Language::new(
11638 LanguageConfig {
11639 name: "JSON".into(),
11640 matcher: LanguageMatcher {
11641 path_suffixes: vec!["json".to_string()],
11642 ..Default::default()
11643 },
11644 ..Default::default()
11645 },
11646 None,
11647 ))
11648}
11649
11650fn js_lang() -> Arc<Language> {
11651 Arc::new(Language::new(
11652 LanguageConfig {
11653 name: "JavaScript".into(),
11654 matcher: LanguageMatcher {
11655 path_suffixes: vec!["js".to_string()],
11656 ..Default::default()
11657 },
11658 ..Default::default()
11659 },
11660 None,
11661 ))
11662}
11663
/// A minimal Python language (no grammar) with a fake toolchain lister that
/// reports a "Python Venv" toolchain for every `.venv` directory found in the
/// ancestors of the queried path, using the provided fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // Report a toolchain for every `.venv` directory found in any
            // ancestor of `subroot_relative_path` (checked via the fake fs).
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is unsupported by this fake lister.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed in tests.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
11738
11739fn typescript_lang() -> Arc<Language> {
11740 Arc::new(Language::new(
11741 LanguageConfig {
11742 name: "TypeScript".into(),
11743 matcher: LanguageMatcher {
11744 path_suffixes: vec!["ts".to_string()],
11745 ..Default::default()
11746 },
11747 ..Default::default()
11748 },
11749 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
11750 ))
11751}
11752
11753fn tsx_lang() -> Arc<Language> {
11754 Arc::new(Language::new(
11755 LanguageConfig {
11756 name: "tsx".into(),
11757 matcher: LanguageMatcher {
11758 path_suffixes: vec!["tsx".to_string()],
11759 ..Default::default()
11760 },
11761 ..Default::default()
11762 },
11763 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
11764 ))
11765}
11766
11767fn get_all_tasks(
11768 project: &Entity<Project>,
11769 task_contexts: Arc<TaskContexts>,
11770 cx: &mut App,
11771) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
11772 let new_tasks = project.update(cx, |project, cx| {
11773 project.task_store().update(cx, |task_store, cx| {
11774 task_store.task_inventory().unwrap().update(cx, |this, cx| {
11775 this.used_and_current_resolved_tasks(task_contexts, cx)
11776 })
11777 })
11778 });
11779
11780 cx.background_spawn(async move {
11781 let (mut old, new) = new_tasks.await;
11782 old.extend(new);
11783 old
11784 })
11785}
11786
11787#[track_caller]
11788fn assert_entry_git_state(
11789 tree: &Worktree,
11790 repository: &Repository,
11791 path: &str,
11792 index_status: Option<StatusCode>,
11793 is_ignored: bool,
11794) {
11795 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
11796 let entry = tree
11797 .entry_for_path(&rel_path(path))
11798 .unwrap_or_else(|| panic!("entry {path} not found"));
11799 let status = repository
11800 .status_for_path(&repo_path(path))
11801 .map(|entry| entry.status);
11802 let expected = index_status.map(|index_status| {
11803 TrackedStatus {
11804 index_status,
11805 worktree_status: StatusCode::Unmodified,
11806 }
11807 .into()
11808 });
11809 assert_eq!(
11810 status, expected,
11811 "expected {path} to have git status: {expected:?}"
11812 );
11813 assert_eq!(
11814 entry.is_ignored, is_ignored,
11815 "expected {path} to have is_ignored: {is_ignored}"
11816 );
11817}
11818
11819#[track_caller]
11820fn git_init(path: &Path) -> git2::Repository {
11821 let mut init_opts = RepositoryInitOptions::new();
11822 init_opts.initial_head("main");
11823 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
11824}
11825
11826#[track_caller]
11827fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
11828 let path = path.as_ref();
11829 let mut index = repo.index().expect("Failed to get index");
11830 index.add_path(path).expect("Failed to add file");
11831 index.write().expect("Failed to write index");
11832}
11833
11834#[track_caller]
11835fn git_remove_index(path: &Path, repo: &git2::Repository) {
11836 let mut index = repo.index().expect("Failed to get index");
11837 index.remove_path(path).expect("Failed to add file");
11838 index.write().expect("Failed to write index");
11839}
11840
11841#[track_caller]
11842fn git_commit(msg: &'static str, repo: &git2::Repository) {
11843 use git2::Signature;
11844
11845 let signature = Signature::now("test", "test@zed.dev").unwrap();
11846 let oid = repo.index().unwrap().write_tree().unwrap();
11847 let tree = repo.find_tree(oid).unwrap();
11848 if let Ok(head) = repo.head() {
11849 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
11850
11851 let parent_commit = parent_obj.as_commit().unwrap();
11852
11853 repo.commit(
11854 Some("HEAD"),
11855 &signature,
11856 &signature,
11857 msg,
11858 &tree,
11859 &[parent_commit],
11860 )
11861 .expect("Failed to commit with parent");
11862 } else {
11863 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
11864 .expect("Failed to commit");
11865 }
11866}
11867
// Currently unused; `#[cfg(any())]` compiles this out entirely.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
11873
/// Stashes the working-tree changes under a fixed test identity.
#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    repo.stash_save(&signature, "N/A", None)
        .expect("Failed to stash");
}
11882
11883#[track_caller]
11884fn git_reset(offset: usize, repo: &git2::Repository) {
11885 let head = repo.head().expect("Couldn't get repo head");
11886 let object = head.peel(git2::ObjectType::Commit).unwrap();
11887 let commit = object.as_commit().unwrap();
11888 let new_head = commit
11889 .parents()
11890 .inspect(|parnet| {
11891 parnet.message();
11892 })
11893 .nth(offset)
11894 .expect("Not enough history");
11895 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
11896 .expect("Could not reset");
11897}
11898
// Currently unused; `#[cfg(any())]` compiles this out entirely.
/// Creates branch `name` pointing at the current HEAD commit.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-pasted panic message: this creates a branch, not a commit.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
11909
// Currently unused; `#[cfg(any())]` compiles this out entirely.
/// Points HEAD at `name` (a refname) and checks out its tree.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
11916
// Currently unused; `#[cfg(any())]` compiles this out entirely.
/// Returns the repository's status entries as a path → status map.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    let mut map = collections::HashMap::default();
    for entry in statuses.iter() {
        map.insert(entry.path().unwrap().to_string(), entry.status());
    }
    map
}
11926
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Open two sibling directories as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's root path and id for assertions below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Top-level file in the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested file resolves to a relative path within the same worktree.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // Files in the second worktree resolve against its own id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path inside a worktree resolves even if no file exists there yet.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
12010
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    // Removing worktrees should keep the git store's repository set and the
    // active repository consistent: a repo shared by two worktrees survives
    // removal of one of them, and the active repository falls back to the
    // next available one (or None when all are gone).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Three worktrees, but only two distinct repositories: /root/b/script is
    // inside the /root/b repository.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the nested /root/b/script worktree must not drop the /root/b
    // repository, since the /root/b worktree still references it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing the worktree that hosts the active repository should switch
    // the active repository to the remaining one.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the last worktree leaves no active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
12123
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    // Staging a file should optimistically mark its hunks as
    // `SecondaryHunkRemovalPending` while the git operation is in flight,
    // then settle to fully-staged once it completes.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index both hold the committed contents, so the on-disk edit
    // appears as a single unstaged modified hunk.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // The optimistic pending state must be observable before the stage
    // operation finishes.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
12268
#[gpui::test]
async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
    // Buffers whose paths match a `read_only_files` glob should open
    // read-only; other files stay writable.
    init_test(cx);

    // Configure read_only_files setting
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/generated/**".to_string(),
                    "**/*.gen.rs".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
                "types.gen.rs": "// Generated file",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Open a regular file - should be read-write
    let regular_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    regular_buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Regular file should not be read-only");
    });

    // Open a file matching *.gen.rs pattern - should be read-only
    let gen_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
        })
        .await
        .unwrap();

    gen_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File matching *.gen.rs pattern should be read-only"
        );
    });

    // Open a file in generated directory - should be read-only
    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File in generated directory should be read-only"
        );
    });
}
12344
#[gpui::test]
async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
    // With an empty `read_only_files` list, no buffer should open read-only.
    init_test(cx);

    // Explicitly set read_only_files to empty (default behavior)
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // All files should be read-write when read_only_files is empty
    let main_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    main_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Files should not be read-only when read_only_files is empty"
        );
    });

    // Even files under `generated/` stay writable without a matching glob.
    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Generated files should not be read-only when read_only_files is empty"
        );
    });
}
12403
#[gpui::test]
async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
    // Exercises `read_only_files` with globs matching lock files: the lock
    // files open read-only while their non-lock siblings stay writable.
    init_test(cx);

    // Configure to make lock files read-only
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/*.lock".to_string(),
                    "**/package-lock.json".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "Cargo.lock": "# Lock file",
            "Cargo.toml": "[package]",
            "package-lock.json": "{}",
            "package.json": "{}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Cargo.lock should be read-only
    let cargo_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.lock"), cx)
        })
        .await
        .unwrap();

    cargo_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "Cargo.lock should be read-only");
    });

    // Cargo.toml should be read-write
    let cargo_toml = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    cargo_toml.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
    });

    // package-lock.json should be read-only
    let package_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package-lock.json"), cx)
        })
        .await
        .unwrap();

    package_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "package-lock.json should be read-only");
    });

    // package.json should be read-write
    let package_json = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package.json"), cx)
        })
        .await
        .unwrap();

    package_json.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "package.json should not be read-only");
    });
}
12482
12483mod disable_ai_settings_tests {
12484 use gpui::TestAppContext;
12485 use project::*;
12486 use settings::{Settings, SettingsStore};
12487
12488 #[gpui::test]
12489 async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
12490 cx.update(|cx| {
12491 settings::init(cx);
12492
12493 // Test 1: Default is false (AI enabled)
12494 assert!(
12495 !DisableAiSettings::get_global(cx).disable_ai,
12496 "Default should allow AI"
12497 );
12498 });
12499
12500 let disable_true = serde_json::json!({
12501 "disable_ai": true
12502 })
12503 .to_string();
12504 let disable_false = serde_json::json!({
12505 "disable_ai": false
12506 })
12507 .to_string();
12508
12509 cx.update_global::<SettingsStore, _>(|store, cx| {
12510 store.set_user_settings(&disable_false, cx).unwrap();
12511 store.set_global_settings(&disable_true, cx).unwrap();
12512 });
12513 cx.update(|cx| {
12514 assert!(
12515 DisableAiSettings::get_global(cx).disable_ai,
12516 "Local false cannot override global true"
12517 );
12518 });
12519
12520 cx.update_global::<SettingsStore, _>(|store, cx| {
12521 store.set_global_settings(&disable_false, cx).unwrap();
12522 store.set_user_settings(&disable_true, cx).unwrap();
12523 });
12524
12525 cx.update(|cx| {
12526 assert!(
12527 DisableAiSettings::get_global(cx).disable_ai,
12528 "Local false cannot override global true"
12529 );
12530 });
12531 }
12532
12533 #[gpui::test]
12534 async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
12535 use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
12536 use worktree::WorktreeId;
12537
12538 cx.update(|cx| {
12539 settings::init(cx);
12540
12541 // Default should allow AI
12542 assert!(
12543 !DisableAiSettings::get_global(cx).disable_ai,
12544 "Default should allow AI"
12545 );
12546 });
12547
12548 let worktree_id = WorktreeId::from_usize(1);
12549 let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
12550 std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
12551 };
12552 let project_path = rel_path("project");
12553 let settings_location = SettingsLocation {
12554 worktree_id,
12555 path: project_path.as_ref(),
12556 };
12557
12558 // Test: Project-level disable_ai=true should disable AI for files in that project
12559 cx.update_global::<SettingsStore, _>(|store, cx| {
12560 store
12561 .set_local_settings(
12562 worktree_id,
12563 LocalSettingsPath::InWorktree(project_path.clone()),
12564 LocalSettingsKind::Settings,
12565 Some(r#"{ "disable_ai": true }"#),
12566 cx,
12567 )
12568 .unwrap();
12569 });
12570
12571 cx.update(|cx| {
12572 let settings = DisableAiSettings::get(Some(settings_location), cx);
12573 assert!(
12574 settings.disable_ai,
12575 "Project-level disable_ai=true should disable AI for files in that project"
12576 );
12577 // Global should now also be true since project-level disable_ai is merged into global
12578 assert!(
12579 DisableAiSettings::get_global(cx).disable_ai,
12580 "Global setting should be affected by project-level disable_ai=true"
12581 );
12582 });
12583
12584 // Test: Setting project-level to false should allow AI for that project
12585 cx.update_global::<SettingsStore, _>(|store, cx| {
12586 store
12587 .set_local_settings(
12588 worktree_id,
12589 LocalSettingsPath::InWorktree(project_path.clone()),
12590 LocalSettingsKind::Settings,
12591 Some(r#"{ "disable_ai": false }"#),
12592 cx,
12593 )
12594 .unwrap();
12595 });
12596
12597 cx.update(|cx| {
12598 let settings = DisableAiSettings::get(Some(settings_location), cx);
12599 assert!(
12600 !settings.disable_ai,
12601 "Project-level disable_ai=false should allow AI"
12602 );
12603 // Global should also be false now
12604 assert!(
12605 !DisableAiSettings::get_global(cx).disable_ai,
12606 "Global setting should be false when project-level is false"
12607 );
12608 });
12609
12610 // Test: User-level true + project-level false = AI disabled (saturation)
12611 let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
12612 cx.update_global::<SettingsStore, _>(|store, cx| {
12613 store.set_user_settings(&disable_true, cx).unwrap();
12614 store
12615 .set_local_settings(
12616 worktree_id,
12617 LocalSettingsPath::InWorktree(project_path.clone()),
12618 LocalSettingsKind::Settings,
12619 Some(r#"{ "disable_ai": false }"#),
12620 cx,
12621 )
12622 .unwrap();
12623 });
12624
12625 cx.update(|cx| {
12626 let settings = DisableAiSettings::get(Some(settings_location), cx);
12627 assert!(
12628 settings.disable_ai,
12629 "Project-level false cannot override user-level true (SaturatingBool)"
12630 );
12631 });
12632 }
12633}