1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::{FakeFs, PathEventKind};
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
45 LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
46 ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
47 language_settings::{LanguageSettingsContent, language_settings},
48 markdown_lang, rust_lang, tree_sitter_typescript,
49};
50use lsp::{
51 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
52 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
53 Uri, WillRenameFiles, notification::DidRenameFiles,
54};
55use parking_lot::Mutex;
56use paths::{config_dir, global_gitignore_path, tasks_file};
57use postage::stream::Stream as _;
58use pretty_assertions::{assert_eq, assert_matches};
59use project::{
60 Event, TaskContexts,
61 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
62 search::{SearchQuery, SearchResult},
63 task_store::{TaskSettingsLocation, TaskStore},
64 *,
65};
66use rand::{Rng as _, rngs::StdRng};
67use serde_json::json;
68use settings::SettingsStore;
69#[cfg(not(windows))]
70use std::os;
71use std::{
72 cell::RefCell,
73 env, mem,
74 num::NonZeroU32,
75 ops::Range,
76 path::{Path, PathBuf},
77 rc::Rc,
78 str::FromStr,
79 sync::{Arc, OnceLock, atomic},
80 task::Poll,
81 time::Duration,
82};
83use sum_tree::SumTree;
84use task::{ResolvedTask, ShellKind, TaskContext};
85use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
86use unindent::Unindent as _;
87use util::{
88 TryFutureExt as _, assert_set_eq, maybe, path,
89 paths::{PathMatcher, PathStyle},
90 rel_path::{RelPath, rel_path},
91 test::{TempTree, marked_text_offsets},
92 uri,
93};
94use worktree::WorktreeModelHandle as _;
95
96#[gpui::test]
97async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
98 cx.executor().allow_parking();
99
100 let (tx, mut rx) = futures::channel::mpsc::unbounded();
101 let _thread = std::thread::spawn(move || {
102 #[cfg(not(target_os = "windows"))]
103 std::fs::metadata("/tmp").unwrap();
104 #[cfg(target_os = "windows")]
105 std::fs::metadata("C:/Windows").unwrap();
106 std::thread::sleep(Duration::from_millis(1000));
107 tx.unbounded_send(1).unwrap();
108 });
109 rx.next().await.unwrap();
110}
111
112#[gpui::test]
113async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
114 cx.executor().allow_parking();
115
116 let io_task = smol::unblock(move || {
117 println!("sleeping on thread {:?}", std::thread::current().id());
118 std::thread::sleep(Duration::from_millis(10));
119 1
120 });
121
122 let task = cx.foreground_executor().spawn(async move {
123 io_task.await;
124 });
125
126 task.await;
127}
128
129#[gpui::test]
130async fn test_default_session_work_dirs_prefers_directory_worktrees_over_single_file_parents(
131 cx: &mut gpui::TestAppContext,
132) {
133 init_test(cx);
134
135 let fs = FakeFs::new(cx.executor());
136 fs.insert_tree(
137 path!("/root"),
138 json!({
139 "dir-project": {
140 "src": {
141 "main.rs": "fn main() {}"
142 }
143 },
144 "single-file.rs": "fn helper() {}"
145 }),
146 )
147 .await;
148
149 let project = Project::test(
150 fs,
151 [
152 Path::new(path!("/root/single-file.rs")),
153 Path::new(path!("/root/dir-project")),
154 ],
155 cx,
156 )
157 .await;
158
159 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
160 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
161
162 assert_eq!(
163 ordered_paths,
164 vec![
165 PathBuf::from(path!("/root/dir-project")),
166 PathBuf::from(path!("/root")),
167 ]
168 );
169}
170
171#[gpui::test]
172async fn test_default_session_work_dirs_falls_back_to_home_for_empty_project(
173 cx: &mut gpui::TestAppContext,
174) {
175 init_test(cx);
176
177 let fs = FakeFs::new(cx.executor());
178 let project = Project::test(fs, [], cx).await;
179
180 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
181 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
182
183 assert_eq!(ordered_paths, vec![paths::home_dir().to_path_buf()]);
184}
185
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are opt-in for the user, and thus
// we assume that they are not supported out of the box.
189#[cfg(not(windows))]
190#[gpui::test]
191async fn test_symlinks(cx: &mut gpui::TestAppContext) {
192 init_test(cx);
193 cx.executor().allow_parking();
194
195 let dir = TempTree::new(json!({
196 "root": {
197 "apple": "",
198 "banana": {
199 "carrot": {
200 "date": "",
201 "endive": "",
202 }
203 },
204 "fennel": {
205 "grape": "",
206 }
207 }
208 }));
209
210 let root_link_path = dir.path().join("root_link");
211 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
212 os::unix::fs::symlink(
213 dir.path().join("root/fennel"),
214 dir.path().join("root/finnochio"),
215 )
216 .unwrap();
217
218 let project = Project::test(
219 Arc::new(RealFs::new(None, cx.executor())),
220 [root_link_path.as_ref()],
221 cx,
222 )
223 .await;
224
225 project.update(cx, |project, cx| {
226 let tree = project.worktrees(cx).next().unwrap().read(cx);
227 assert_eq!(tree.file_count(), 5);
228 assert_eq!(
229 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
230 tree.entry_for_path(rel_path("finnochio/grape"))
231 .unwrap()
232 .inode
233 );
234 });
235}
236
// Verifies EditorConfig support end-to-end: .editorconfig values override
// .zed/settings.json, nested .editorconfig files override ancestors,
// `tab_width` is used when `indent_size` is absent, and files not matched
// by any glob keep the Zed settings.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Temp tree with a root .editorconfig, a Zed settings file, and nested
    // .editorconfig files in `b/` and `d/` overriding the root values.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
            "tab_size": 8,
            "hard_tabs": false,
            "ensure_final_newline_on_save": false,
            "remove_trailing_whitespace_on_save": false,
            "preferred_line_length": 64,
            "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "d": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 1
            "#,
            "d.rs": "fn d() {\n D\n}",
        },
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_d = settings_for("d/d.rs");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in subdirectory overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
        assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
344
345#[gpui::test]
346async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
347 init_test(cx);
348
349 let fs = FakeFs::new(cx.executor());
350 fs.insert_tree(
351 path!("/grandparent"),
352 json!({
353 ".editorconfig": "[*]\nindent_size = 4\n",
354 "parent": {
355 ".editorconfig": "[*.rs]\nindent_size = 2\n",
356 "worktree": {
357 ".editorconfig": "[*.md]\nindent_size = 3\n",
358 "main.rs": "fn main() {}",
359 "README.md": "# README",
360 "other.txt": "other content",
361 }
362 }
363 }),
364 )
365 .await;
366
367 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
368
369 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
370 language_registry.add(rust_lang());
371 language_registry.add(markdown_lang());
372
373 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
374
375 cx.executor().run_until_parked();
376
377 cx.update(|cx| {
378 let tree = worktree.read(cx);
379 let settings_for = |path: &str| {
380 let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
381 let file = File::for_entry(file_entry, worktree.clone());
382 let file_language = project
383 .read(cx)
384 .languages()
385 .load_language_for_file_path(file.path.as_std_path());
386 let file_language = cx
387 .foreground_executor()
388 .block_on(file_language)
389 .expect("Failed to get file language");
390 let file = file as _;
391 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
392 };
393
394 let settings_rs = settings_for("main.rs");
395 let settings_md = settings_for("README.md");
396 let settings_txt = settings_for("other.txt");
397
398 // main.rs gets indent_size = 2 from parent's external .editorconfig
399 assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));
400
401 // README.md gets indent_size = 3 from internal worktree .editorconfig
402 assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));
403
404 // other.txt gets indent_size = 4 from grandparent's external .editorconfig
405 assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
406 });
407}
408
409#[gpui::test]
410async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
411 init_test(cx);
412
413 let fs = FakeFs::new(cx.executor());
414 fs.insert_tree(
415 path!("/worktree"),
416 json!({
417 ".editorconfig": "[*]\nindent_size = 99\n",
418 "src": {
419 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
420 "file.rs": "fn main() {}",
421 }
422 }),
423 )
424 .await;
425
426 let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;
427
428 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
429 language_registry.add(rust_lang());
430
431 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
432
433 cx.executor().run_until_parked();
434
435 cx.update(|cx| {
436 let tree = worktree.read(cx);
437 let file_entry = tree
438 .entry_for_path(rel_path("src/file.rs"))
439 .unwrap()
440 .clone();
441 let file = File::for_entry(file_entry, worktree.clone());
442 let file_language = project
443 .read(cx)
444 .languages()
445 .load_language_for_file_path(file.path.as_std_path());
446 let file_language = cx
447 .foreground_executor()
448 .block_on(file_language)
449 .expect("Failed to get file language");
450 let file = file as _;
451 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
452
453 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
454 });
455}
456
457#[gpui::test]
458async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
459 init_test(cx);
460
461 let fs = FakeFs::new(cx.executor());
462 fs.insert_tree(
463 path!("/parent"),
464 json!({
465 ".editorconfig": "[*]\nindent_size = 99\n",
466 "worktree": {
467 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
468 "file.rs": "fn main() {}",
469 }
470 }),
471 )
472 .await;
473
474 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
475
476 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
477 language_registry.add(rust_lang());
478
479 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
480
481 cx.executor().run_until_parked();
482
483 cx.update(|cx| {
484 let tree = worktree.read(cx);
485 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
486 let file = File::for_entry(file_entry, worktree.clone());
487 let file_language = project
488 .read(cx)
489 .languages()
490 .load_language_for_file_path(file.path.as_std_path());
491 let file_language = cx
492 .foreground_executor()
493 .block_on(file_language)
494 .expect("Failed to get file language");
495 let file = file as _;
496 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
497
498 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
499 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
500 });
501}
502
// Verifies that a `root = true` .editorconfig in an ancestor directory
// (here: `parent/`) stops traversal there, so the grandparent's config is
// never applied — even though the worktree itself has no .editorconfig.
#[gpui::test]
async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "parent": {
                ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
                "worktree": {
                    "file.rs": "fn main() {}",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
550
// Verifies that two sibling worktrees both pick up settings from a single
// external .editorconfig in their common parent directory.
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    // Open both sibling directories as separate worktrees of one project.
    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        // The same assertion must hold in each worktree, regardless of their
        // differing internal .editorconfig contents.
        for worktree in worktrees {
            let tree = worktree.read(cx);
            let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            let settings =
                language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        }
    });
}
612
613#[gpui::test]
614async fn test_external_editorconfig_not_loaded_without_internal_config(
615 cx: &mut gpui::TestAppContext,
616) {
617 init_test(cx);
618
619 let fs = FakeFs::new(cx.executor());
620 fs.insert_tree(
621 path!("/parent"),
622 json!({
623 ".editorconfig": "[*]\nindent_size = 99\n",
624 "worktree": {
625 "file.rs": "fn main() {}",
626 }
627 }),
628 )
629 .await;
630
631 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
632
633 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
634 language_registry.add(rust_lang());
635
636 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
637
638 cx.executor().run_until_parked();
639
640 cx.update(|cx| {
641 let tree = worktree.read(cx);
642 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
643 let file = File::for_entry(file_entry, worktree.clone());
644 let file_language = project
645 .read(cx)
646 .languages()
647 .load_language_for_file_path(file.path.as_std_path());
648 let file_language = cx
649 .foreground_executor()
650 .block_on(file_language)
651 .expect("Failed to get file language");
652 let file = file as _;
653 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
654
655 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
656 // because without an internal .editorconfig, external configs are not loaded
657 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
658 });
659}
660
// Verifies that rewriting an external (outside-the-worktree) .editorconfig
// on disk is observed and refreshes the resolved language settings.
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the external config on disk; the change should propagate once
    // the executor runs pending work.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
734
// Verifies that adding a second worktree via `find_or_create_worktree` also
// discovers the external .editorconfig shared with the existing worktree's
// common parent directory.
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    // Start with only the first directory open as a worktree.
    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktree = project.read(cx).worktrees(cx).next().unwrap();
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Open the sibling directory as a second worktree.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = new_worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, new_worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
811
812#[gpui::test]
813async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
814 init_test(cx);
815
816 let fs = FakeFs::new(cx.executor());
817 fs.insert_tree(
818 path!("/parent"),
819 json!({
820 ".editorconfig": "[*]\nindent_size = 6\n",
821 "worktree": {
822 ".editorconfig": "[*]\n",
823 "file.rs": "fn main() {}",
824 }
825 }),
826 )
827 .await;
828
829 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
830
831 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
832 language_registry.add(rust_lang());
833
834 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
835 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
836
837 cx.executor().run_until_parked();
838
839 cx.update(|cx| {
840 let store = cx.global::<SettingsStore>();
841 let (worktree_ids, external_paths, watcher_paths) =
842 store.editorconfig_store.read(cx).test_state();
843
844 // Test external config is loaded
845 assert!(worktree_ids.contains(&worktree_id));
846 assert!(!external_paths.is_empty());
847 assert!(!watcher_paths.is_empty());
848 });
849
850 project.update(cx, |project, cx| {
851 project.remove_worktree(worktree_id, cx);
852 });
853
854 cx.executor().run_until_parked();
855
856 cx.update(|cx| {
857 let store = cx.global::<SettingsStore>();
858 let (worktree_ids, external_paths, watcher_paths) =
859 store.editorconfig_store.read(cx).test_state();
860
861 // Test worktree state, external configs, and watchers all removed
862 assert!(!worktree_ids.contains(&worktree_id));
863 assert!(external_paths.is_empty());
864 assert!(watcher_paths.is_empty());
865 });
866}
867
// Verifies reference-counting of a shared external .editorconfig: removing
// one of two worktrees must NOT drop the external config (or its watcher)
// while the other worktree still relies on it.
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    cx.update(|cx| {
        let tree = worktree_b.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree_b.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
965
966#[gpui::test]
967async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
968 init_test(cx);
969 cx.update(|cx| {
970 GitHostingProviderRegistry::default_global(cx);
971 git_hosting_providers::init(cx);
972 });
973
974 let fs = FakeFs::new(cx.executor());
975 let str_path = path!("/dir");
976 let path = Path::new(str_path);
977
978 fs.insert_tree(
979 path!("/dir"),
980 json!({
981 ".zed": {
982 "settings.json": r#"{
983 "git_hosting_providers": [
984 {
985 "provider": "gitlab",
986 "base_url": "https://google.com",
987 "name": "foo"
988 }
989 ]
990 }"#
991 },
992 }),
993 )
994 .await;
995
996 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
997 let (_worktree, _) =
998 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
999 cx.executor().run_until_parked();
1000
1001 cx.update(|cx| {
1002 let provider = GitHostingProviderRegistry::global(cx);
1003 assert!(
1004 provider
1005 .list_hosting_providers()
1006 .into_iter()
1007 .any(|provider| provider.name() == "foo")
1008 );
1009 });
1010
1011 fs.atomic_write(
1012 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
1013 "{}".into(),
1014 )
1015 .await
1016 .unwrap();
1017
1018 cx.run_until_parked();
1019
1020 cx.update(|cx| {
1021 let provider = GitHostingProviderRegistry::global(cx);
1022 assert!(
1023 !provider
1024 .list_hosting_providers()
1025 .into_iter()
1026 .any(|provider| provider.name() == "foo")
1027 );
1028 });
1029}
1030
1031#[gpui::test]
1032async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
1033 init_test(cx);
1034 TaskStore::init(None);
1035
1036 let fs = FakeFs::new(cx.executor());
1037 fs.insert_tree(
1038 path!("/dir"),
1039 json!({
1040 ".zed": {
1041 "settings.json": r#"{ "tab_size": 8 }"#,
1042 "tasks.json": r#"[{
1043 "label": "cargo check all",
1044 "command": "cargo",
1045 "args": ["check", "--all"]
1046 },]"#,
1047 },
1048 "a": {
1049 "a.rs": "fn a() {\n A\n}"
1050 },
1051 "b": {
1052 ".zed": {
1053 "settings.json": r#"{ "tab_size": 2 }"#,
1054 "tasks.json": r#"[{
1055 "label": "cargo check",
1056 "command": "cargo",
1057 "args": ["check"]
1058 },]"#,
1059 },
1060 "b.rs": "fn b() {\n B\n}"
1061 }
1062 }),
1063 )
1064 .await;
1065
1066 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1067 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1068
1069 cx.executor().run_until_parked();
1070 let worktree_id = cx.update(|cx| {
1071 project.update(cx, |project, cx| {
1072 project.worktrees(cx).next().unwrap().read(cx).id()
1073 })
1074 });
1075
1076 let mut task_contexts = TaskContexts::default();
1077 task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
1078 let task_contexts = Arc::new(task_contexts);
1079
1080 let topmost_local_task_source_kind = TaskSourceKind::Worktree {
1081 id: worktree_id,
1082 directory_in_worktree: rel_path(".zed").into(),
1083 id_base: "local worktree tasks from directory \".zed\"".into(),
1084 };
1085
1086 let all_tasks = cx
1087 .update(|cx| {
1088 let tree = worktree.read(cx);
1089
1090 let file_a = File::for_entry(
1091 tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
1092 worktree.clone(),
1093 ) as _;
1094 let settings_a = language_settings(None, Some(&file_a), cx);
1095 let file_b = File::for_entry(
1096 tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
1097 worktree.clone(),
1098 ) as _;
1099 let settings_b = language_settings(None, Some(&file_b), cx);
1100
1101 assert_eq!(settings_a.tab_size.get(), 8);
1102 assert_eq!(settings_b.tab_size.get(), 2);
1103
1104 get_all_tasks(&project, task_contexts.clone(), cx)
1105 })
1106 .await
1107 .into_iter()
1108 .map(|(source_kind, task)| {
1109 let resolved = task.resolved;
1110 (
1111 source_kind,
1112 task.resolved_label,
1113 resolved.args,
1114 resolved.env,
1115 )
1116 })
1117 .collect::<Vec<_>>();
1118 assert_eq!(
1119 all_tasks,
1120 vec![
1121 (
1122 TaskSourceKind::Worktree {
1123 id: worktree_id,
1124 directory_in_worktree: rel_path("b/.zed").into(),
1125 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1126 },
1127 "cargo check".to_string(),
1128 vec!["check".to_string()],
1129 HashMap::default(),
1130 ),
1131 (
1132 topmost_local_task_source_kind.clone(),
1133 "cargo check all".to_string(),
1134 vec!["check".to_string(), "--all".to_string()],
1135 HashMap::default(),
1136 ),
1137 ]
1138 );
1139
1140 let (_, resolved_task) = cx
1141 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1142 .await
1143 .into_iter()
1144 .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
1145 .expect("should have one global task");
1146 project.update(cx, |project, cx| {
1147 let task_inventory = project
1148 .task_store()
1149 .read(cx)
1150 .task_inventory()
1151 .cloned()
1152 .unwrap();
1153 task_inventory.update(cx, |inventory, _| {
1154 inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
1155 inventory
1156 .update_file_based_tasks(
1157 TaskSettingsLocation::Global(tasks_file()),
1158 Some(
1159 &json!([{
1160 "label": "cargo check unstable",
1161 "command": "cargo",
1162 "args": [
1163 "check",
1164 "--all",
1165 "--all-targets"
1166 ],
1167 "env": {
1168 "RUSTFLAGS": "-Zunstable-options"
1169 }
1170 }])
1171 .to_string(),
1172 ),
1173 )
1174 .unwrap();
1175 });
1176 });
1177 cx.run_until_parked();
1178
1179 let all_tasks = cx
1180 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1181 .await
1182 .into_iter()
1183 .map(|(source_kind, task)| {
1184 let resolved = task.resolved;
1185 (
1186 source_kind,
1187 task.resolved_label,
1188 resolved.args,
1189 resolved.env,
1190 )
1191 })
1192 .collect::<Vec<_>>();
1193 assert_eq!(
1194 all_tasks,
1195 vec![
1196 (
1197 topmost_local_task_source_kind.clone(),
1198 "cargo check all".to_string(),
1199 vec!["check".to_string(), "--all".to_string()],
1200 HashMap::default(),
1201 ),
1202 (
1203 TaskSourceKind::Worktree {
1204 id: worktree_id,
1205 directory_in_worktree: rel_path("b/.zed").into(),
1206 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1207 },
1208 "cargo check".to_string(),
1209 vec!["check".to_string()],
1210 HashMap::default(),
1211 ),
1212 (
1213 TaskSourceKind::AbsPath {
1214 abs_path: paths::tasks_file().clone(),
1215 id_base: "global tasks.json".into(),
1216 },
1217 "cargo check unstable".to_string(),
1218 vec![
1219 "check".to_string(),
1220 "--all".to_string(),
1221 "--all-targets".to_string(),
1222 ],
1223 HashMap::from_iter(Some((
1224 "RUSTFLAGS".to_string(),
1225 "-Zunstable-options".to_string()
1226 ))),
1227 ),
1228 ]
1229 );
1230}
1231
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we had a chance to set up the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        // The toast is expected to name the offending variable and link to
        // the tasks documentation.
        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1289
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // One worktree whose single task depends on the `ZED_WORKTREE_ROOT`
    // variable being present in the task context.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // With only an active-item context and no worktree context, there is no
    // ZED_WORKTREE_ROOT to substitute, so the task must not resolve.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Once the active worktree context supplies WorktreeRoot, the task
    // resolves and the variable is substituted into the command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
1381
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: the nearest ancestor (up to `depth`) that
    // directly contains a `pyproject.toml` is treated as the project root.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Walk upwards from `path`, returning the first ancestor whose
            // `pyproject.toml` exists according to the delegate.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two Python subprojects inside one worktree, each with its own manifest
    // and a `.venv` directory (unused until a toolchain is activated below).
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain has been activated yet for project-b.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activating a toolchain for project-b should spawn a separate server
    // instance for that subproject.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1583
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end check of local language-server management: startup on the
    // first matching buffer, capability-based buffer configuration,
    // change/save/rename notifications, diagnostics reset on language
    // change, and server restarts.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server advertising completion triggers and save notifications.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server with a different trigger set, so the two servers'
    // effects on buffers can be told apart.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed one diagnostic so we can verify below that it is cleared when the
    // buffer's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1986
#[gpui::test]
async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two servers for Rust: one configured with a worktree-relative binary
    // path, one whose configured binary is expected to be looked up on PATH.
    let settings_json_contents = json!({
        "languages": {
            "Rust": {
                "language_servers": ["my_fake_lsp", "lsp_on_path"]
            }
        },
        "lsp": {
            "my_fake_lsp": {
                "binary": {
                    // file exists, so this is treated as a relative path
                    "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
                }
            },
            "lsp_on_path": {
                "binary": {
                    // file doesn't exist, so it will fall back on PATH env var
                    "path": path!("lsp_on_path.exe").to_string(),
                }
            }
        },
    });

    let fs = FakeFs::new(cx.executor());
    // NOTE(review): the tree below creates `.relative_path/to/my_fake_lsp.exe`,
    // while the settings above reference `my_fake_lsp_binary.exe`. Confirm
    // whether relative-path resolution actually checks file existence; if it
    // does, these names should probably be aligned.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": settings_json_contents.to_string(),
            },
            ".relative_path": {
                "to": {
                    "my_fake_lsp.exe": "",
                },
            },
            "src": {
                "main.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let mut my_fake_lsp = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "my_fake_lsp",
            ..Default::default()
        },
    );
    let mut lsp_on_path = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "lsp_on_path",
            ..Default::default()
        },
    );

    cx.run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    // The relative path is resolved against the worktree root...
    let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
    assert_eq!(
        lsp_path.to_string_lossy(),
        path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
    );

    // ...while the bare name is left untouched for PATH lookup.
    let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
    assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
}
2070
2071#[gpui::test]
2072async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2073 init_test(cx);
2074
2075 let settings_json_contents = json!({
2076 "languages": {
2077 "Rust": {
2078 "language_servers": ["tilde_lsp"]
2079 }
2080 },
2081 "lsp": {
2082 "tilde_lsp": {
2083 "binary": {
2084 "path": "~/.local/bin/rust-analyzer",
2085 }
2086 }
2087 },
2088 });
2089
2090 let fs = FakeFs::new(cx.executor());
2091 fs.insert_tree(
2092 path!("/root"),
2093 json!({
2094 ".zed": {
2095 "settings.json": settings_json_contents.to_string(),
2096 },
2097 "src": {
2098 "main.rs": "fn main() {}",
2099 }
2100 }),
2101 )
2102 .await;
2103
2104 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2105 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2106 language_registry.add(rust_lang());
2107
2108 let mut tilde_lsp = language_registry.register_fake_lsp(
2109 "Rust",
2110 FakeLspAdapter {
2111 name: "tilde_lsp",
2112 ..Default::default()
2113 },
2114 );
2115 cx.run_until_parked();
2116
2117 project
2118 .update(cx, |project, cx| {
2119 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2120 })
2121 .await
2122 .unwrap();
2123
2124 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2125 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2126 assert_eq!(
2127 lsp_path, expected_path,
2128 "Tilde path should expand to home directory"
2129 );
2130}
2131
#[gpui::test]
async fn test_rescan_fs_change_is_reported_to_language_servers_as_changed(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, _lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the server by opening a Rust buffer.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Have the server register a `workspace/didChangeWatchedFiles` watcher
    // for Cargo.lock, and record every file event it is notified about.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(
            lsp::RegistrationParams {
                registrations: vec![lsp::Registration {
                    id: Default::default(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: serde_json::to_value(
                        lsp::DidChangeWatchedFilesRegistrationOptions {
                            watchers: vec![lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            }],
                        },
                    )
                    .ok(),
                }],
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
        }
    });

    // No events should have been delivered before any fs activity.
    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);

    // A `Rescan` fs event on the watched file must surface to the server as
    // a plain CHANGED file event.
    fs.emit_fs_event(path!("/the-root/Cargo.lock"), Some(PathEventKind::Rescan));
    cx.executor().run_until_parked();

    assert_eq!(
        &*file_changes.lock(),
        &[lsp::FileEvent {
            uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
            typ: lsp::FileChangeType::CHANGED,
        }]
    );
}
2222
2223#[gpui::test]
2224async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2225 init_test(cx);
2226
2227 let fs = FakeFs::new(cx.executor());
2228 fs.insert_tree(
2229 path!("/the-root"),
2230 json!({
2231 ".gitignore": "target\n",
2232 "Cargo.lock": "",
2233 "src": {
2234 "a.rs": "",
2235 "b.rs": "",
2236 },
2237 "target": {
2238 "x": {
2239 "out": {
2240 "x.rs": ""
2241 }
2242 },
2243 "y": {
2244 "out": {
2245 "y.rs": "",
2246 }
2247 },
2248 "z": {
2249 "out": {
2250 "z.rs": ""
2251 }
2252 }
2253 }
2254 }),
2255 )
2256 .await;
2257 fs.insert_tree(
2258 path!("/the-registry"),
2259 json!({
2260 "dep1": {
2261 "src": {
2262 "dep1.rs": "",
2263 }
2264 },
2265 "dep2": {
2266 "src": {
2267 "dep2.rs": "",
2268 }
2269 },
2270 }),
2271 )
2272 .await;
2273 fs.insert_tree(
2274 path!("/the/stdlib"),
2275 json!({
2276 "LICENSE": "",
2277 "src": {
2278 "string.rs": "",
2279 }
2280 }),
2281 )
2282 .await;
2283
2284 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2285 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2286 (project.languages().clone(), project.lsp_store())
2287 });
2288 language_registry.add(rust_lang());
2289 let mut fake_servers = language_registry.register_fake_lsp(
2290 "Rust",
2291 FakeLspAdapter {
2292 name: "the-language-server",
2293 ..Default::default()
2294 },
2295 );
2296
2297 cx.executor().run_until_parked();
2298
2299 // Start the language server by opening a buffer with a compatible file extension.
2300 project
2301 .update(cx, |project, cx| {
2302 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2303 })
2304 .await
2305 .unwrap();
2306
2307 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2308 project.update(cx, |project, cx| {
2309 let worktree = project.worktrees(cx).next().unwrap();
2310 assert_eq!(
2311 worktree
2312 .read(cx)
2313 .snapshot()
2314 .entries(true, 0)
2315 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2316 .collect::<Vec<_>>(),
2317 &[
2318 ("", false),
2319 (".gitignore", false),
2320 ("Cargo.lock", false),
2321 ("src", false),
2322 ("src/a.rs", false),
2323 ("src/b.rs", false),
2324 ("target", true),
2325 ]
2326 );
2327 });
2328
2329 let prev_read_dir_count = fs.read_dir_call_count();
2330
2331 let fake_server = fake_servers.next().await.unwrap();
2332 cx.executor().run_until_parked();
2333 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2334 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2335 id
2336 });
2337
2338 // Simulate jumping to a definition in a dependency outside of the worktree.
2339 let _out_of_worktree_buffer = project
2340 .update(cx, |project, cx| {
2341 project.open_local_buffer_via_lsp(
2342 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2343 server_id,
2344 cx,
2345 )
2346 })
2347 .await
2348 .unwrap();
2349
2350 // Keep track of the FS events reported to the language server.
2351 let file_changes = Arc::new(Mutex::new(Vec::new()));
2352 fake_server
2353 .request::<lsp::request::RegisterCapability>(
2354 lsp::RegistrationParams {
2355 registrations: vec![lsp::Registration {
2356 id: Default::default(),
2357 method: "workspace/didChangeWatchedFiles".to_string(),
2358 register_options: serde_json::to_value(
2359 lsp::DidChangeWatchedFilesRegistrationOptions {
2360 watchers: vec![
2361 lsp::FileSystemWatcher {
2362 glob_pattern: lsp::GlobPattern::String(
2363 path!("/the-root/Cargo.toml").to_string(),
2364 ),
2365 kind: None,
2366 },
2367 lsp::FileSystemWatcher {
2368 glob_pattern: lsp::GlobPattern::String(
2369 path!("/the-root/src/*.{rs,c}").to_string(),
2370 ),
2371 kind: None,
2372 },
2373 lsp::FileSystemWatcher {
2374 glob_pattern: lsp::GlobPattern::String(
2375 path!("/the-root/target/y/**/*.rs").to_string(),
2376 ),
2377 kind: None,
2378 },
2379 lsp::FileSystemWatcher {
2380 glob_pattern: lsp::GlobPattern::String(
2381 path!("/the/stdlib/src/**/*.rs").to_string(),
2382 ),
2383 kind: None,
2384 },
2385 lsp::FileSystemWatcher {
2386 glob_pattern: lsp::GlobPattern::String(
2387 path!("**/Cargo.lock").to_string(),
2388 ),
2389 kind: None,
2390 },
2391 ],
2392 },
2393 )
2394 .ok(),
2395 }],
2396 },
2397 DEFAULT_LSP_REQUEST_TIMEOUT,
2398 )
2399 .await
2400 .into_response()
2401 .unwrap();
2402 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2403 let file_changes = file_changes.clone();
2404 move |params, _| {
2405 let mut file_changes = file_changes.lock();
2406 file_changes.extend(params.changes);
2407 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2408 }
2409 });
2410
2411 cx.executor().run_until_parked();
2412 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2413 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2414
2415 let mut new_watched_paths = fs.watched_paths();
2416 new_watched_paths.retain(|path| {
2417 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2418 });
2419 assert_eq!(
2420 &new_watched_paths,
2421 &[
2422 Path::new(path!("/the-root")),
2423 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2424 Path::new(path!("/the/stdlib/src"))
2425 ]
2426 );
2427
2428 // Now the language server has asked us to watch an ignored directory path,
2429 // so we recursively load it.
2430 project.update(cx, |project, cx| {
2431 let worktree = project.visible_worktrees(cx).next().unwrap();
2432 assert_eq!(
2433 worktree
2434 .read(cx)
2435 .snapshot()
2436 .entries(true, 0)
2437 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2438 .collect::<Vec<_>>(),
2439 &[
2440 ("", false),
2441 (".gitignore", false),
2442 ("Cargo.lock", false),
2443 ("src", false),
2444 ("src/a.rs", false),
2445 ("src/b.rs", false),
2446 ("target", true),
2447 ("target/x", true),
2448 ("target/y", true),
2449 ("target/y/out", true),
2450 ("target/y/out/y.rs", true),
2451 ("target/z", true),
2452 ]
2453 );
2454 });
2455
2456 // Perform some file system mutations, two of which match the watched patterns,
2457 // and one of which does not.
2458 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2459 .await
2460 .unwrap();
2461 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2462 .await
2463 .unwrap();
2464 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2465 .await
2466 .unwrap();
2467 fs.create_file(
2468 path!("/the-root/target/x/out/x2.rs").as_ref(),
2469 Default::default(),
2470 )
2471 .await
2472 .unwrap();
2473 fs.create_file(
2474 path!("/the-root/target/y/out/y2.rs").as_ref(),
2475 Default::default(),
2476 )
2477 .await
2478 .unwrap();
2479 fs.save(
2480 path!("/the-root/Cargo.lock").as_ref(),
2481 &"".into(),
2482 Default::default(),
2483 )
2484 .await
2485 .unwrap();
2486 fs.save(
2487 path!("/the-stdlib/LICENSE").as_ref(),
2488 &"".into(),
2489 Default::default(),
2490 )
2491 .await
2492 .unwrap();
2493 fs.save(
2494 path!("/the/stdlib/src/string.rs").as_ref(),
2495 &"".into(),
2496 Default::default(),
2497 )
2498 .await
2499 .unwrap();
2500
2501 // The language server receives events for the FS mutations that match its watch patterns.
2502 cx.executor().run_until_parked();
2503 assert_eq!(
2504 &*file_changes.lock(),
2505 &[
2506 lsp::FileEvent {
2507 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2508 typ: lsp::FileChangeType::CHANGED,
2509 },
2510 lsp::FileEvent {
2511 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2512 typ: lsp::FileChangeType::DELETED,
2513 },
2514 lsp::FileEvent {
2515 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2516 typ: lsp::FileChangeType::CREATED,
2517 },
2518 lsp::FileEvent {
2519 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2520 typ: lsp::FileChangeType::CREATED,
2521 },
2522 lsp::FileEvent {
2523 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2524 typ: lsp::FileChangeType::CHANGED,
2525 },
2526 ]
2527 );
2528}
2529
2530#[gpui::test]
2531async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2532 init_test(cx);
2533
2534 let fs = FakeFs::new(cx.executor());
2535 fs.insert_tree(
2536 path!("/dir"),
2537 json!({
2538 "a.rs": "let a = 1;",
2539 "b.rs": "let b = 2;"
2540 }),
2541 )
2542 .await;
2543
2544 let project = Project::test(
2545 fs,
2546 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2547 cx,
2548 )
2549 .await;
2550 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2551
2552 let buffer_a = project
2553 .update(cx, |project, cx| {
2554 project.open_local_buffer(path!("/dir/a.rs"), cx)
2555 })
2556 .await
2557 .unwrap();
2558 let buffer_b = project
2559 .update(cx, |project, cx| {
2560 project.open_local_buffer(path!("/dir/b.rs"), cx)
2561 })
2562 .await
2563 .unwrap();
2564
2565 lsp_store.update(cx, |lsp_store, cx| {
2566 lsp_store
2567 .update_diagnostics(
2568 LanguageServerId(0),
2569 lsp::PublishDiagnosticsParams {
2570 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2571 version: None,
2572 diagnostics: vec![lsp::Diagnostic {
2573 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2574 severity: Some(lsp::DiagnosticSeverity::ERROR),
2575 message: "error 1".to_string(),
2576 ..Default::default()
2577 }],
2578 },
2579 None,
2580 DiagnosticSourceKind::Pushed,
2581 &[],
2582 cx,
2583 )
2584 .unwrap();
2585 lsp_store
2586 .update_diagnostics(
2587 LanguageServerId(0),
2588 lsp::PublishDiagnosticsParams {
2589 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2590 version: None,
2591 diagnostics: vec![lsp::Diagnostic {
2592 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2593 severity: Some(DiagnosticSeverity::WARNING),
2594 message: "error 2".to_string(),
2595 ..Default::default()
2596 }],
2597 },
2598 None,
2599 DiagnosticSourceKind::Pushed,
2600 &[],
2601 cx,
2602 )
2603 .unwrap();
2604 });
2605
2606 buffer_a.update(cx, |buffer, _| {
2607 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2608 assert_eq!(
2609 chunks
2610 .iter()
2611 .map(|(s, d)| (s.as_str(), *d))
2612 .collect::<Vec<_>>(),
2613 &[
2614 ("let ", None),
2615 ("a", Some(DiagnosticSeverity::ERROR)),
2616 (" = 1;", None),
2617 ]
2618 );
2619 });
2620 buffer_b.update(cx, |buffer, _| {
2621 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2622 assert_eq!(
2623 chunks
2624 .iter()
2625 .map(|(s, d)| (s.as_str(), *d))
2626 .collect::<Vec<_>>(),
2627 &[
2628 ("let ", None),
2629 ("b", Some(DiagnosticSeverity::WARNING)),
2630 (" = 2;", None),
2631 ]
2632 );
2633 });
2634}
2635
2636#[gpui::test]
2637async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2638 init_test(cx);
2639
2640 let fs = FakeFs::new(cx.executor());
2641 fs.insert_tree(
2642 path!("/root"),
2643 json!({
2644 "dir": {
2645 ".git": {
2646 "HEAD": "ref: refs/heads/main",
2647 },
2648 ".gitignore": "b.rs",
2649 "a.rs": "let a = 1;",
2650 "b.rs": "let b = 2;",
2651 },
2652 "other.rs": "let b = c;"
2653 }),
2654 )
2655 .await;
2656
2657 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2658 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2659 let (worktree, _) = project
2660 .update(cx, |project, cx| {
2661 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2662 })
2663 .await
2664 .unwrap();
2665 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2666
2667 let (worktree, _) = project
2668 .update(cx, |project, cx| {
2669 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2670 })
2671 .await
2672 .unwrap();
2673 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2674
2675 let server_id = LanguageServerId(0);
2676 lsp_store.update(cx, |lsp_store, cx| {
2677 lsp_store
2678 .update_diagnostics(
2679 server_id,
2680 lsp::PublishDiagnosticsParams {
2681 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2682 version: None,
2683 diagnostics: vec![lsp::Diagnostic {
2684 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2685 severity: Some(lsp::DiagnosticSeverity::ERROR),
2686 message: "unused variable 'b'".to_string(),
2687 ..Default::default()
2688 }],
2689 },
2690 None,
2691 DiagnosticSourceKind::Pushed,
2692 &[],
2693 cx,
2694 )
2695 .unwrap();
2696 lsp_store
2697 .update_diagnostics(
2698 server_id,
2699 lsp::PublishDiagnosticsParams {
2700 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2701 version: None,
2702 diagnostics: vec![lsp::Diagnostic {
2703 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2704 severity: Some(lsp::DiagnosticSeverity::ERROR),
2705 message: "unknown variable 'c'".to_string(),
2706 ..Default::default()
2707 }],
2708 },
2709 None,
2710 DiagnosticSourceKind::Pushed,
2711 &[],
2712 cx,
2713 )
2714 .unwrap();
2715 });
2716
2717 let main_ignored_buffer = project
2718 .update(cx, |project, cx| {
2719 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2720 })
2721 .await
2722 .unwrap();
2723 main_ignored_buffer.update(cx, |buffer, _| {
2724 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2725 assert_eq!(
2726 chunks
2727 .iter()
2728 .map(|(s, d)| (s.as_str(), *d))
2729 .collect::<Vec<_>>(),
2730 &[
2731 ("let ", None),
2732 ("b", Some(DiagnosticSeverity::ERROR)),
2733 (" = 2;", None),
2734 ],
2735 "Gigitnored buffers should still get in-buffer diagnostics",
2736 );
2737 });
2738 let other_buffer = project
2739 .update(cx, |project, cx| {
2740 project.open_buffer((other_worktree_id, rel_path("")), cx)
2741 })
2742 .await
2743 .unwrap();
2744 other_buffer.update(cx, |buffer, _| {
2745 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2746 assert_eq!(
2747 chunks
2748 .iter()
2749 .map(|(s, d)| (s.as_str(), *d))
2750 .collect::<Vec<_>>(),
2751 &[
2752 ("let b = ", None),
2753 ("c", Some(DiagnosticSeverity::ERROR)),
2754 (";", None),
2755 ],
2756 "Buffers from hidden projects should still get in-buffer diagnostics"
2757 );
2758 });
2759
2760 project.update(cx, |project, cx| {
2761 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2762 assert_eq!(
2763 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2764 vec![(
2765 ProjectPath {
2766 worktree_id: main_worktree_id,
2767 path: rel_path("b.rs").into(),
2768 },
2769 server_id,
2770 DiagnosticSummary {
2771 error_count: 1,
2772 warning_count: 0,
2773 }
2774 )]
2775 );
2776 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2777 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2778 });
2779}
2780
#[gpui::test]
// Exercises the disk-based-diagnostics lifecycle events emitted by the
// project: `DiskBasedDiagnosticsStarted` when the server begins progress under
// the configured token, `DiagnosticsUpdated` when diagnostics are published,
// and `DiskBasedDiagnosticsFinished` when the progress ends. Also checks that
// publishing identical empty diagnostics twice yields only one update event.
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter reports disk-based diagnostic progress under `progress_token`.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token raises the Started event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the tokened progress raises the Finished event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is attached to the buffer at the exact range.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No further event: the second identical empty publish is deduplicated.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2916
#[gpui::test]
// Restarts a language server while its disk-based diagnostics progress is
// still in flight, then verifies the replacement server's lifecycle events
// (removed, added, buffer-registered, diagnostics started/finished) and that
// only the new server is ever reported as running disk-based diagnostics —
// the old server's unfinished progress must not linger.
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Old server (id 0) is removed; the replacement comes up as id 1.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the new server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
3018
3019#[gpui::test]
3020async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
3021 init_test(cx);
3022
3023 let fs = FakeFs::new(cx.executor());
3024 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
3025
3026 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3027
3028 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3029 language_registry.add(rust_lang());
3030 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3031
3032 let (buffer, _) = project
3033 .update(cx, |project, cx| {
3034 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3035 })
3036 .await
3037 .unwrap();
3038
3039 // Publish diagnostics
3040 let fake_server = fake_servers.next().await.unwrap();
3041 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3042 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3043 version: None,
3044 diagnostics: vec![lsp::Diagnostic {
3045 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3046 severity: Some(lsp::DiagnosticSeverity::ERROR),
3047 message: "the message".to_string(),
3048 ..Default::default()
3049 }],
3050 });
3051
3052 cx.executor().run_until_parked();
3053 buffer.update(cx, |buffer, _| {
3054 assert_eq!(
3055 buffer
3056 .snapshot()
3057 .diagnostics_in_range::<_, usize>(0..1, false)
3058 .map(|entry| entry.diagnostic.message.clone())
3059 .collect::<Vec<_>>(),
3060 ["the message".to_string()]
3061 );
3062 });
3063 project.update(cx, |project, cx| {
3064 assert_eq!(
3065 project.diagnostic_summary(false, cx),
3066 DiagnosticSummary {
3067 error_count: 1,
3068 warning_count: 0,
3069 }
3070 );
3071 });
3072
3073 project.update(cx, |project, cx| {
3074 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3075 });
3076
3077 // The diagnostics are cleared.
3078 cx.executor().run_until_parked();
3079 buffer.update(cx, |buffer, _| {
3080 assert_eq!(
3081 buffer
3082 .snapshot()
3083 .diagnostics_in_range::<_, usize>(0..1, false)
3084 .map(|entry| entry.diagnostic.message.clone())
3085 .collect::<Vec<_>>(),
3086 Vec::<String>::new(),
3087 );
3088 });
3089 project.update(cx, |project, cx| {
3090 assert_eq!(
3091 project.diagnostic_summary(false, cx),
3092 DiagnosticSummary {
3093 error_count: 0,
3094 warning_count: 0,
3095 }
3096 );
3097 });
3098}
3099
3100#[gpui::test]
3101async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
3102 init_test(cx);
3103
3104 let fs = FakeFs::new(cx.executor());
3105 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
3106
3107 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3108 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3109
3110 language_registry.add(rust_lang());
3111 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3112
3113 let (buffer, _handle) = project
3114 .update(cx, |project, cx| {
3115 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3116 })
3117 .await
3118 .unwrap();
3119
3120 // Before restarting the server, report diagnostics with an unknown buffer version.
3121 let fake_server = fake_servers.next().await.unwrap();
3122 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3123 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3124 version: Some(10000),
3125 diagnostics: Vec::new(),
3126 });
3127 cx.executor().run_until_parked();
3128 project.update(cx, |project, cx| {
3129 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3130 });
3131
3132 let mut fake_server = fake_servers.next().await.unwrap();
3133 let notification = fake_server
3134 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3135 .await
3136 .text_document;
3137 assert_eq!(notification.version, 0);
3138}
3139
#[gpui::test]
// Verifies that cancelling language-server work for a buffer sends a
// `window/workDoneProgress/cancel` notification only for progress that was
// started as cancellable; non-cancellable progress must be left alone.
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First progress token: explicitly NOT cancellable — must not be cancelled.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second progress token: cancellable — the only one eligible for cancel.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Exactly the cancellable token is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3211
#[gpui::test]
// Toggles `enable_language_server` per language in user settings and checks
// that only the matching server is affected: disabling Rust exits only the
// Rust server; re-enabling Rust while disabling JavaScript starts a fresh
// Rust server (which re-opens the buffer) and exits the JavaScript one.
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Independent fake servers for the two languages under test.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The fresh Rust server re-opens the still-open buffer…
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // …while the JavaScript server is told to exit.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3329
// Verifies that push diagnostics published against an OLDER version of a
// document are transformed through the buffer edits made since that version:
// ranges shift with the edits, overlapping diagnostics layer correctly in
// chunk highlighting, and a payload whose entries are not ordered by position
// is still handled.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // "disk" is registered as a disk-based diagnostics source, so diagnostics
    // with `source: "disk"` are expected to surface with `is_disk_based: true`.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer.
    // The `version` refers to the didOpen snapshot, i.e. BEFORE the "\n\n"
    // edit above, so positions here are two rows above their current location.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Rows 1/2 from the published payload now appear at rows 3/4.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        // Full-buffer chunking: each diagnosed identifier gets its own chunk
        // carrying its severity; surrounding text carries None.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range query clips the diagnostic chunks at its boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            // Wider WARNING range overlapping the ERROR above.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // Where the ERROR and WARNING overlap, the ERROR wins; the WARNING
        // covers the remainder of its own wider range.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics: the payload lists row 1 before row 0.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The stored entries come back sorted by buffer position, with ranges
        // adjusted for the indentation / signature / identifier edits above.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
3621
3622#[gpui::test]
3623async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
3624 init_test(cx);
3625
3626 let text = concat!(
3627 "let one = ;\n", //
3628 "let two = \n",
3629 "let three = 3;\n",
3630 );
3631
3632 let fs = FakeFs::new(cx.executor());
3633 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3634
3635 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3636 let buffer = project
3637 .update(cx, |project, cx| {
3638 project.open_local_buffer(path!("/dir/a.rs"), cx)
3639 })
3640 .await
3641 .unwrap();
3642
3643 project.update(cx, |project, cx| {
3644 project.lsp_store().update(cx, |lsp_store, cx| {
3645 lsp_store
3646 .update_diagnostic_entries(
3647 LanguageServerId(0),
3648 PathBuf::from(path!("/dir/a.rs")),
3649 None,
3650 None,
3651 vec![
3652 DiagnosticEntry {
3653 range: Unclipped(PointUtf16::new(0, 10))
3654 ..Unclipped(PointUtf16::new(0, 10)),
3655 diagnostic: Diagnostic {
3656 severity: DiagnosticSeverity::ERROR,
3657 message: "syntax error 1".to_string(),
3658 source_kind: DiagnosticSourceKind::Pushed,
3659 ..Diagnostic::default()
3660 },
3661 },
3662 DiagnosticEntry {
3663 range: Unclipped(PointUtf16::new(1, 10))
3664 ..Unclipped(PointUtf16::new(1, 10)),
3665 diagnostic: Diagnostic {
3666 severity: DiagnosticSeverity::ERROR,
3667 message: "syntax error 2".to_string(),
3668 source_kind: DiagnosticSourceKind::Pushed,
3669 ..Diagnostic::default()
3670 },
3671 },
3672 ],
3673 cx,
3674 )
3675 .unwrap();
3676 })
3677 });
3678
3679 // An empty range is extended forward to include the following character.
3680 // At the end of a line, an empty range is extended backward to include
3681 // the preceding character.
3682 buffer.update(cx, |buffer, _| {
3683 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
3684 assert_eq!(
3685 chunks
3686 .iter()
3687 .map(|(s, d)| (s.as_str(), *d))
3688 .collect::<Vec<_>>(),
3689 &[
3690 ("let one = ", None),
3691 (";", Some(DiagnosticSeverity::ERROR)),
3692 ("\nlet two =", None),
3693 (" ", Some(DiagnosticSeverity::ERROR)),
3694 ("\nlet three = 3;\n", None)
3695 ]
3696 );
3697 });
3698}
3699
3700#[gpui::test]
3701async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3702 init_test(cx);
3703
3704 let fs = FakeFs::new(cx.executor());
3705 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3706 .await;
3707
3708 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3709 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3710
3711 lsp_store.update(cx, |lsp_store, cx| {
3712 lsp_store
3713 .update_diagnostic_entries(
3714 LanguageServerId(0),
3715 Path::new(path!("/dir/a.rs")).to_owned(),
3716 None,
3717 None,
3718 vec![DiagnosticEntry {
3719 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3720 diagnostic: Diagnostic {
3721 severity: DiagnosticSeverity::ERROR,
3722 is_primary: true,
3723 message: "syntax error a1".to_string(),
3724 source_kind: DiagnosticSourceKind::Pushed,
3725 ..Diagnostic::default()
3726 },
3727 }],
3728 cx,
3729 )
3730 .unwrap();
3731 lsp_store
3732 .update_diagnostic_entries(
3733 LanguageServerId(1),
3734 Path::new(path!("/dir/a.rs")).to_owned(),
3735 None,
3736 None,
3737 vec![DiagnosticEntry {
3738 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3739 diagnostic: Diagnostic {
3740 severity: DiagnosticSeverity::ERROR,
3741 is_primary: true,
3742 message: "syntax error b1".to_string(),
3743 source_kind: DiagnosticSourceKind::Pushed,
3744 ..Diagnostic::default()
3745 },
3746 }],
3747 cx,
3748 )
3749 .unwrap();
3750
3751 assert_eq!(
3752 lsp_store.diagnostic_summary(false, cx),
3753 DiagnosticSummary {
3754 error_count: 2,
3755 warning_count: 0,
3756 }
3757 );
3758 });
3759}
3760
3761#[gpui::test]
3762async fn test_diagnostic_summaries_cleared_on_worktree_entry_removal(
3763 cx: &mut gpui::TestAppContext,
3764) {
3765 init_test(cx);
3766
3767 let fs = FakeFs::new(cx.executor());
3768 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one", "b.rs": "two" }))
3769 .await;
3770
3771 let project = Project::test(fs.clone(), [Path::new(path!("/dir"))], cx).await;
3772 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3773
3774 lsp_store.update(cx, |lsp_store, cx| {
3775 lsp_store
3776 .update_diagnostic_entries(
3777 LanguageServerId(0),
3778 Path::new(path!("/dir/a.rs")).to_owned(),
3779 None,
3780 None,
3781 vec![DiagnosticEntry {
3782 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3783 diagnostic: Diagnostic {
3784 severity: DiagnosticSeverity::ERROR,
3785 is_primary: true,
3786 message: "error in a".to_string(),
3787 source_kind: DiagnosticSourceKind::Pushed,
3788 ..Diagnostic::default()
3789 },
3790 }],
3791 cx,
3792 )
3793 .unwrap();
3794 lsp_store
3795 .update_diagnostic_entries(
3796 LanguageServerId(0),
3797 Path::new(path!("/dir/b.rs")).to_owned(),
3798 None,
3799 None,
3800 vec![DiagnosticEntry {
3801 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3802 diagnostic: Diagnostic {
3803 severity: DiagnosticSeverity::WARNING,
3804 is_primary: true,
3805 message: "warning in b".to_string(),
3806 source_kind: DiagnosticSourceKind::Pushed,
3807 ..Diagnostic::default()
3808 },
3809 }],
3810 cx,
3811 )
3812 .unwrap();
3813
3814 assert_eq!(
3815 lsp_store.diagnostic_summary(false, cx),
3816 DiagnosticSummary {
3817 error_count: 1,
3818 warning_count: 1,
3819 }
3820 );
3821 });
3822
3823 fs.remove_file(path!("/dir/a.rs").as_ref(), Default::default())
3824 .await
3825 .unwrap();
3826 cx.executor().run_until_parked();
3827
3828 lsp_store.update(cx, |lsp_store, cx| {
3829 assert_eq!(
3830 lsp_store.diagnostic_summary(false, cx),
3831 DiagnosticSummary {
3832 error_count: 0,
3833 warning_count: 1,
3834 },
3835 );
3836 });
3837}
3838
// Verifies that restarting a language server clears the diagnostics it had
// published and emits a `DiagnosticsUpdated` event for the cleared state.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_server_restart(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one error so there is something for the restart to clear.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 1)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error before restart".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Subscribe before restarting so no event is missed.
    let mut events = cx.events(&project);

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });
    cx.executor().run_until_parked();

    // Drain every event that is already queued without blocking: `now_or_never`
    // yields None as soon as the stream has no ready item.
    let mut received_diagnostics_updated = false;
    while let Some(Some(event)) =
        futures::FutureExt::now_or_never(futures::StreamExt::next(&mut events))
    {
        if matches!(event, Event::DiagnosticsUpdated { .. }) {
            received_diagnostics_updated = true;
        }
    }
    assert!(
        received_diagnostics_updated,
        "DiagnosticsUpdated event should be emitted when a language server is stopped"
    );

    // The restarted server has published nothing yet, so the summary is empty.
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
3912
// Verifies that reloading a buffer from disk (after the file changes under it)
// triggers a fresh document-diagnostic pull from a server that advertises the
// pull-diagnostics capability.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_buffer_reload(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Shared counter incremented by the fake server each time it handles a
    // `textDocument/diagnostic` pull request.
    let pull_count = Arc::new(atomic::AtomicUsize::new(0));
    let closure_pull_count = pull_count.clone();
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Advertise pull diagnostics so the client issues
            // DocumentDiagnosticRequest at all.
            capabilities: lsp::ServerCapabilities {
                diagnostic_provider: Some(lsp::DiagnosticServerCapabilities::Options(
                    lsp::DiagnosticOptions {
                        identifier: Some("test-reload".to_string()),
                        inter_file_dependencies: true,
                        workspace_diagnostics: false,
                        work_done_progress_options: Default::default(),
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            // Every pull returns an empty full report and bumps the counter.
            initializer: Some(Box::new(move |fake_server| {
                let pull_count = closure_pull_count.clone();
                fake_server.set_request_handler::<lsp::request::DocumentDiagnosticRequest, _, _>(
                    move |_, _| {
                        let pull_count = pull_count.clone();
                        async move {
                            pull_count.fetch_add(1, atomic::Ordering::SeqCst);
                            Ok(lsp::DocumentDiagnosticReportResult::Report(
                                lsp::DocumentDiagnosticReport::Full(
                                    lsp::RelatedFullDocumentDiagnosticReport {
                                        related_documents: None,
                                        full_document_diagnostic_report:
                                            lsp::FullDocumentDiagnosticReport {
                                                result_id: None,
                                                items: Vec::new(),
                                            },
                                    },
                                ),
                            ))
                        }
                    },
                );
            })),
            ..FakeLspAdapter::default()
        },
    );

    let (_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Publish initial diagnostics via the fake server.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 3)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error in a".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Snapshot the counter so only pulls caused by the reload are measured.
    let pulls_before = pull_count.load(atomic::Ordering::SeqCst);

    // Change the file on disk. The FS event triggers buffer reload,
    // which in turn triggers pull_diagnostics_for_buffer.
    fs.save(
        path!("/dir/a.rs").as_ref(),
        &"fixed content".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    let pulls_after = pull_count.load(atomic::Ordering::SeqCst);
    assert!(
        pulls_after > pulls_before,
        "Expected document diagnostic pull after buffer reload (before={pulls_before}, after={pulls_after})"
    );
}
4020
// Verifies that `edits_from_lsp` correctly rebases edits that a language
// server computed against an OLDER document version: the buffer is edited
// after didOpen, then LSP edits carrying the didOpen version are resolved and
// must land where the original text now lives.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server "saw" — all LSP edit positions below are
    // expressed against this snapshot.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Resolve LSP edits expressed against `lsp_document_version`; they must be
    // translated through the three buffer edits made above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits preserves the comments added after didOpen
    // while performing the server's intended replacements.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
4175
// Verifies that `edits_from_lsp` minimizes a "replace almost everything" style
// LSP edit (as rust-analyzer emits for merge-imports) down to the small set of
// changes that actually differ.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four LSP edits above must be collapsed into exactly two minimal
        // buffer edits: replace the import path, and delete `use a::c;`.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4286
4287#[gpui::test]
4288async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
4289 cx: &mut gpui::TestAppContext,
4290) {
4291 init_test(cx);
4292
4293 let text = "Path()";
4294
4295 let fs = FakeFs::new(cx.executor());
4296 fs.insert_tree(
4297 path!("/dir"),
4298 json!({
4299 "a.rs": text
4300 }),
4301 )
4302 .await;
4303
4304 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4305 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
4306 let buffer = project
4307 .update(cx, |project, cx| {
4308 project.open_local_buffer(path!("/dir/a.rs"), cx)
4309 })
4310 .await
4311 .unwrap();
4312
4313 // Simulate the language server sending us a pair of edits at the same location,
4314 // with an insertion following a replacement (which violates the LSP spec).
4315 let edits = lsp_store
4316 .update(cx, |lsp_store, cx| {
4317 lsp_store.as_local_mut().unwrap().edits_from_lsp(
4318 &buffer,
4319 [
4320 lsp::TextEdit {
4321 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
4322 new_text: "Path".into(),
4323 },
4324 lsp::TextEdit {
4325 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
4326 new_text: "from path import Path\n\n\n".into(),
4327 },
4328 ],
4329 LanguageServerId(0),
4330 None,
4331 cx,
4332 )
4333 })
4334 .await
4335 .unwrap();
4336
4337 buffer.update(cx, |buffer, cx| {
4338 buffer.edit(edits, None, cx);
4339 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
4340 });
4341}
4342
// Verifies that `edits_from_lsp` sanitizes malformed server edits: unordered
// edit lists, inverted ranges (end before start), and ranges that point past
// the end of the document must all be normalized into valid minimal edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (row 99) is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same two minimal
        // edits as in the well-formed merge-imports test above.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4449
4450fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4451 buffer: &Buffer,
4452 range: Range<T>,
4453) -> Vec<(String, Option<DiagnosticSeverity>)> {
4454 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4455 for chunk in buffer.snapshot().chunks(range, true) {
4456 if chunks
4457 .last()
4458 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4459 {
4460 chunks.last_mut().unwrap().0.push_str(chunk.text);
4461 } else {
4462 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4463 }
4464 }
4465 chunks
4466}
4467
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Go-to-definition targeting a file outside the project's visible
    // worktree should host the target buffer in an invisible worktree,
    // which is released once the definition result is dropped.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` lives outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server responds with a definition located in `a.rs`.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, `a.rs` appears as an invisible
        // (`false`) worktree next to the visible `b.rs` one.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path along with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4568
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    // A completion item carrying an explicit `textEdit` must use that edit's
    // text and range, overriding both `insertText` and the item's label.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server replies with all three fields populated; only the
    // `text_edit` contents should survive into the resolved completion.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4652
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // When completion items omit their own `textEdit`, the list-level
    // `itemDefaults.editRange` supplies the replace range; the new text
    // falls back to `textEditText` if present, else to the item's label.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` wins over the label when present.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a defaults edit_range in play, the label is used as the new
        // text (insert_text is ignored here).
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4790
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // When neither the item nor the list defaults provide an edit range,
    // the replace range must be derived locally from the buffer text around
    // the cursor, and the new text falls back to insert_text, then label.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // insert_text is used; the range covers the partial word "fqn".
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is used; the range covers "cmp" just before the closing quote.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4897
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    // Carriage returns (`\r`, `\r\n`) inside a completion's insert text
    // must be normalized to plain `\n` before being applied to the buffer.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert_text mixes a bare `\r` and a `\r\n`.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4966
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // A code action without edits must be resolved, its command executed
    // via `workspace/executeCommand`, and edits the server then sends via
    // `workspace/applyEdit` must be captured into the project transaction.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
5110
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    // Renaming a file into a directory hierarchy that does not yet exist
    // should create all intermediate directories; renaming again into an
    // already-existing directory should also succeed. File contents are
    // preserved across both moves.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // First move: target directory chain `dir1/dir2/dir3` does not exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Second move: `dir1/dir2` already exists from the previous rename.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
5218
5219#[gpui::test(iterations = 10)]
5220async fn test_save_file(cx: &mut gpui::TestAppContext) {
5221 init_test(cx);
5222
5223 let fs = FakeFs::new(cx.executor());
5224 fs.insert_tree(
5225 path!("/dir"),
5226 json!({
5227 "file1": "the old contents",
5228 }),
5229 )
5230 .await;
5231
5232 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5233 let buffer = project
5234 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5235 .await
5236 .unwrap();
5237 buffer.update(cx, |buffer, cx| {
5238 assert_eq!(buffer.text(), "the old contents");
5239 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5240 });
5241
5242 project
5243 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5244 .await
5245 .unwrap();
5246
5247 let new_text = fs
5248 .load(Path::new(path!("/dir/file1")))
5249 .await
5250 .unwrap()
5251 .replace("\r\n", "\n");
5252 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5253}
5254
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Saving an untitled buffer under a path with a recognized extension
    // should assign the language and start its language server.
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer has no file, hence no language and no server yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving as `file.rs` assigns the Rust language to the buffer.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
5334
5335#[gpui::test(iterations = 30)]
5336async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
5337 init_test(cx);
5338
5339 let fs = FakeFs::new(cx.executor());
5340 fs.insert_tree(
5341 path!("/dir"),
5342 json!({
5343 "file1": "the original contents",
5344 }),
5345 )
5346 .await;
5347
5348 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5349 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5350 let buffer = project
5351 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5352 .await
5353 .unwrap();
5354
5355 // Change the buffer's file on disk, and then wait for the file change
5356 // to be detected by the worktree, so that the buffer starts reloading.
5357 fs.save(
5358 path!("/dir/file1").as_ref(),
5359 &"the first contents".into(),
5360 Default::default(),
5361 )
5362 .await
5363 .unwrap();
5364 worktree.next_event(cx).await;
5365
5366 // Change the buffer's file again. Depending on the random seed, the
5367 // previous file change may still be in progress.
5368 fs.save(
5369 path!("/dir/file1").as_ref(),
5370 &"the second contents".into(),
5371 Default::default(),
5372 )
5373 .await
5374 .unwrap();
5375 worktree.next_event(cx).await;
5376
5377 cx.executor().run_until_parked();
5378 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5379 buffer.read_with(cx, |buffer, _| {
5380 assert_eq!(buffer.text(), on_disk_text);
5381 assert!(!buffer.is_dirty(), "buffer should not be dirty");
5382 assert!(!buffer.has_conflict(), "buffer should not be dirty");
5383 });
5384}
5385
5386#[gpui::test(iterations = 30)]
5387async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
5388 init_test(cx);
5389
5390 let fs = FakeFs::new(cx.executor());
5391 fs.insert_tree(
5392 path!("/dir"),
5393 json!({
5394 "file1": "the original contents",
5395 }),
5396 )
5397 .await;
5398
5399 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5400 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5401 let buffer = project
5402 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5403 .await
5404 .unwrap();
5405
5406 // Change the buffer's file on disk, and then wait for the file change
5407 // to be detected by the worktree, so that the buffer starts reloading.
5408 fs.save(
5409 path!("/dir/file1").as_ref(),
5410 &"the first contents".into(),
5411 Default::default(),
5412 )
5413 .await
5414 .unwrap();
5415 worktree.next_event(cx).await;
5416
5417 cx.executor()
5418 .spawn(cx.executor().simulate_random_delay())
5419 .await;
5420
5421 // Perform a noop edit, causing the buffer's version to increase.
5422 buffer.update(cx, |buffer, cx| {
5423 buffer.edit([(0..0, " ")], None, cx);
5424 buffer.undo(cx);
5425 });
5426
5427 cx.executor().run_until_parked();
5428 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5429 buffer.read_with(cx, |buffer, _| {
5430 let buffer_text = buffer.text();
5431 if buffer_text == on_disk_text {
5432 assert!(
5433 !buffer.is_dirty() && !buffer.has_conflict(),
5434 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5435 );
5436 }
5437 // If the file change occurred while the buffer was processing the first
5438 // change, the buffer will be in a conflicting state.
5439 else {
5440 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5441 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5442 }
5443 });
5444}
5445
5446#[gpui::test]
5447async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5448 init_test(cx);
5449
5450 let fs = FakeFs::new(cx.executor());
5451 fs.insert_tree(
5452 path!("/dir"),
5453 json!({
5454 "file1": "the old contents",
5455 }),
5456 )
5457 .await;
5458
5459 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5460 let buffer = project
5461 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5462 .await
5463 .unwrap();
5464 buffer.update(cx, |buffer, cx| {
5465 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5466 });
5467
5468 project
5469 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5470 .await
5471 .unwrap();
5472
5473 let new_text = fs
5474 .load(Path::new(path!("/dir/file1")))
5475 .await
5476 .unwrap()
5477 .replace("\r\n", "\n");
5478 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5479}
5480
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    // `save_buffer_as` on an untitled buffer should write the file, attach
    // it to the buffer, clear the dirty flag, re-detect the language from
    // the new extension, and make subsequent opens of that path return the
    // same buffer entity.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    // Before saving, the buffer is dirty and has no file-based language.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text");
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    // After saving as `.rs`, the language switches to Rust and the buffer
    // is clean.
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust");
    });

    // Opening the new path must return the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
5534
5535#[gpui::test]
5536async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
5537 init_test(cx);
5538
5539 let fs = FakeFs::new(cx.executor());
5540 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5541
5542 fs.insert_tree(
5543 path!("/dir"),
5544 json!({
5545 "data_a.txt": "data about a"
5546 }),
5547 )
5548 .await;
5549
5550 let buffer = project
5551 .update(cx, |project, cx| {
5552 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5553 })
5554 .await
5555 .unwrap();
5556
5557 buffer.update(cx, |buffer, cx| {
5558 buffer.edit([(11..12, "b")], None, cx);
5559 });
5560
5561 // Save buffer's contents as a new file and confirm that the buffer's now
5562 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
5563 // file associated with the buffer has now been updated to `data_b.txt`
5564 project
5565 .update(cx, |project, cx| {
5566 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5567 let new_path = ProjectPath {
5568 worktree_id,
5569 path: rel_path("data_b.txt").into(),
5570 };
5571
5572 project.save_buffer_as(buffer.clone(), new_path, cx)
5573 })
5574 .await
5575 .unwrap();
5576
5577 buffer.update(cx, |buffer, cx| {
5578 assert_eq!(
5579 buffer.file().unwrap().full_path(cx),
5580 Path::new("dir/data_b.txt")
5581 )
5582 });
5583
5584 // Open the original `data_a.txt` file, confirming that its contents are
5585 // unchanged and the resulting buffer's associated file is `data_a.txt`.
5586 let original_buffer = project
5587 .update(cx, |project, cx| {
5588 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5589 })
5590 .await
5591 .unwrap();
5592
5593 original_buffer.update(cx, |buffer, cx| {
5594 assert_eq!(buffer.text(), "data about a");
5595 assert_eq!(
5596 buffer.file().unwrap().full_path(cx),
5597 Path::new("dir/data_a.txt")
5598 )
5599 });
5600}
5601
// Renames and deletes files on a real filesystem and verifies that:
// 1. the worktree rescan preserves entry ids and re-points open buffers at
//    their renamed paths (the deleted file's buffer reports
//    `DiskState::Deleted`), and
// 2. replaying the observed scan updates onto a remote worktree replica
//    reproduces the same set of paths.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // This test uses the real filesystem, so allow the executor to park while
    // waiting for fs events.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Returns the worktree entry id currently associated with `path`.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can be replayed on
    // the remote replica at the end of the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    // All buffers start out clean.
    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The rescanned worktree reflects all of the renames and the deletion.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids are preserved across renames, including files whose parent
    // directory was renamed.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths. The deleted file's
    // buffer keeps its old path but reports `DiskState::Deleted`.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5769
5770#[cfg(target_os = "linux")]
5771#[gpui::test(retries = 5)]
5772async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
5773 init_test(cx);
5774 cx.executor().allow_parking();
5775
5776 let dir = TempTree::new(json!({}));
5777 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
5778 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5779
5780 tree.flush_fs_events(cx).await;
5781
5782 let repro_dir = dir.path().join("repro");
5783 std::fs::create_dir(&repro_dir).unwrap();
5784 tree.flush_fs_events(cx).await;
5785
5786 cx.update(|cx| {
5787 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5788 });
5789
5790 std::fs::remove_dir_all(&repro_dir).unwrap();
5791 tree.flush_fs_events(cx).await;
5792
5793 cx.update(|cx| {
5794 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
5795 });
5796
5797 std::fs::create_dir(&repro_dir).unwrap();
5798 tree.flush_fs_events(cx).await;
5799
5800 cx.update(|cx| {
5801 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5802 });
5803
5804 std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
5805 tree.flush_fs_events(cx).await;
5806
5807 cx.update(|cx| {
5808 assert!(
5809 tree.read(cx)
5810 .entry_for_path(rel_path("repro/repro-marker"))
5811 .is_some()
5812 );
5813 });
5814}
5815
5816#[gpui::test(iterations = 10)]
5817async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
5818 init_test(cx);
5819
5820 let fs = FakeFs::new(cx.executor());
5821 fs.insert_tree(
5822 path!("/dir"),
5823 json!({
5824 "a": {
5825 "file1": "",
5826 }
5827 }),
5828 )
5829 .await;
5830
5831 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
5832 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5833 let tree_id = tree.update(cx, |tree, _| tree.id());
5834
5835 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5836 project.update(cx, |project, cx| {
5837 let tree = project.worktrees(cx).next().unwrap();
5838 tree.read(cx)
5839 .entry_for_path(rel_path(path))
5840 .unwrap_or_else(|| panic!("no entry for path {}", path))
5841 .id
5842 })
5843 };
5844
5845 let dir_id = id_for_path("a", cx);
5846 let file_id = id_for_path("a/file1", cx);
5847 let buffer = project
5848 .update(cx, |p, cx| {
5849 p.open_buffer((tree_id, rel_path("a/file1")), cx)
5850 })
5851 .await
5852 .unwrap();
5853 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5854
5855 project
5856 .update(cx, |project, cx| {
5857 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
5858 })
5859 .unwrap()
5860 .await
5861 .into_included()
5862 .unwrap();
5863 cx.executor().run_until_parked();
5864
5865 assert_eq!(id_for_path("b", cx), dir_id);
5866 assert_eq!(id_for_path("b/file1", cx), file_id);
5867 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5868}
5869
5870#[gpui::test]
5871async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5872 init_test(cx);
5873
5874 let fs = FakeFs::new(cx.executor());
5875 fs.insert_tree(
5876 "/dir",
5877 json!({
5878 "a.txt": "a-contents",
5879 "b.txt": "b-contents",
5880 }),
5881 )
5882 .await;
5883
5884 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5885
5886 // Spawn multiple tasks to open paths, repeating some paths.
5887 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5888 (
5889 p.open_local_buffer("/dir/a.txt", cx),
5890 p.open_local_buffer("/dir/b.txt", cx),
5891 p.open_local_buffer("/dir/a.txt", cx),
5892 )
5893 });
5894
5895 let buffer_a_1 = buffer_a_1.await.unwrap();
5896 let buffer_a_2 = buffer_a_2.await.unwrap();
5897 let buffer_b = buffer_b.await.unwrap();
5898 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5899 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5900
5901 // There is only one buffer per path.
5902 let buffer_a_id = buffer_a_1.entity_id();
5903 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5904
5905 // Open the same path again while it is still open.
5906 drop(buffer_a_1);
5907 let buffer_a_3 = project
5908 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5909 .await
5910 .unwrap();
5911
5912 // There's still only one buffer per path.
5913 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5914}
5915
5916#[gpui::test]
5917async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5918 init_test(cx);
5919
5920 let fs = FakeFs::new(cx.executor());
5921 fs.insert_tree(
5922 path!("/dir"),
5923 json!({
5924 "file1": "abc",
5925 "file2": "def",
5926 "file3": "ghi",
5927 }),
5928 )
5929 .await;
5930
5931 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5932
5933 let buffer1 = project
5934 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5935 .await
5936 .unwrap();
5937 let events = Arc::new(Mutex::new(Vec::new()));
5938
5939 // initially, the buffer isn't dirty.
5940 buffer1.update(cx, |buffer, cx| {
5941 cx.subscribe(&buffer1, {
5942 let events = events.clone();
5943 move |_, _, event, _| match event {
5944 BufferEvent::Operation { .. } => {}
5945 _ => events.lock().push(event.clone()),
5946 }
5947 })
5948 .detach();
5949
5950 assert!(!buffer.is_dirty());
5951 assert!(events.lock().is_empty());
5952
5953 buffer.edit([(1..2, "")], None, cx);
5954 });
5955
5956 // after the first edit, the buffer is dirty, and emits a dirtied event.
5957 buffer1.update(cx, |buffer, cx| {
5958 assert!(buffer.text() == "ac");
5959 assert!(buffer.is_dirty());
5960 assert_eq!(
5961 *events.lock(),
5962 &[
5963 language::BufferEvent::Edited { is_local: true },
5964 language::BufferEvent::DirtyChanged
5965 ]
5966 );
5967 events.lock().clear();
5968 buffer.did_save(
5969 buffer.version(),
5970 buffer.file().unwrap().disk_state().mtime(),
5971 cx,
5972 );
5973 });
5974
5975 // after saving, the buffer is not dirty, and emits a saved event.
5976 buffer1.update(cx, |buffer, cx| {
5977 assert!(!buffer.is_dirty());
5978 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
5979 events.lock().clear();
5980
5981 buffer.edit([(1..1, "B")], None, cx);
5982 buffer.edit([(2..2, "D")], None, cx);
5983 });
5984
5985 // after editing again, the buffer is dirty, and emits another dirty event.
5986 buffer1.update(cx, |buffer, cx| {
5987 assert!(buffer.text() == "aBDc");
5988 assert!(buffer.is_dirty());
5989 assert_eq!(
5990 *events.lock(),
5991 &[
5992 language::BufferEvent::Edited { is_local: true },
5993 language::BufferEvent::DirtyChanged,
5994 language::BufferEvent::Edited { is_local: true },
5995 ],
5996 );
5997 events.lock().clear();
5998
5999 // After restoring the buffer to its previously-saved state,
6000 // the buffer is not considered dirty anymore.
6001 buffer.edit([(1..3, "")], None, cx);
6002 assert!(buffer.text() == "ac");
6003 assert!(!buffer.is_dirty());
6004 });
6005
6006 assert_eq!(
6007 *events.lock(),
6008 &[
6009 language::BufferEvent::Edited { is_local: true },
6010 language::BufferEvent::DirtyChanged
6011 ]
6012 );
6013
6014 // When a file is deleted, it is not considered dirty.
6015 let events = Arc::new(Mutex::new(Vec::new()));
6016 let buffer2 = project
6017 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
6018 .await
6019 .unwrap();
6020 buffer2.update(cx, |_, cx| {
6021 cx.subscribe(&buffer2, {
6022 let events = events.clone();
6023 move |_, _, event, _| match event {
6024 BufferEvent::Operation { .. } => {}
6025 _ => events.lock().push(event.clone()),
6026 }
6027 })
6028 .detach();
6029 });
6030
6031 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
6032 .await
6033 .unwrap();
6034 cx.executor().run_until_parked();
6035 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
6036 assert_eq!(
6037 mem::take(&mut *events.lock()),
6038 &[language::BufferEvent::FileHandleChanged]
6039 );
6040
6041 // Buffer becomes dirty when edited.
6042 buffer2.update(cx, |buffer, cx| {
6043 buffer.edit([(2..3, "")], None, cx);
6044 assert_eq!(buffer.is_dirty(), true);
6045 });
6046 assert_eq!(
6047 mem::take(&mut *events.lock()),
6048 &[
6049 language::BufferEvent::Edited { is_local: true },
6050 language::BufferEvent::DirtyChanged
6051 ]
6052 );
6053
6054 // Buffer becomes clean again when all of its content is removed, because
6055 // the file was deleted.
6056 buffer2.update(cx, |buffer, cx| {
6057 buffer.edit([(0..2, "")], None, cx);
6058 assert_eq!(buffer.is_empty(), true);
6059 assert_eq!(buffer.is_dirty(), false);
6060 });
6061 assert_eq!(
6062 *events.lock(),
6063 &[
6064 language::BufferEvent::Edited { is_local: true },
6065 language::BufferEvent::DirtyChanged
6066 ]
6067 );
6068
6069 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6070 let events = Arc::new(Mutex::new(Vec::new()));
6071 let buffer3 = project
6072 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
6073 .await
6074 .unwrap();
6075 buffer3.update(cx, |_, cx| {
6076 cx.subscribe(&buffer3, {
6077 let events = events.clone();
6078 move |_, _, event, _| match event {
6079 BufferEvent::Operation { .. } => {}
6080 _ => events.lock().push(event.clone()),
6081 }
6082 })
6083 .detach();
6084 });
6085
6086 buffer3.update(cx, |buffer, cx| {
6087 buffer.edit([(0..0, "x")], None, cx);
6088 });
6089 events.lock().clear();
6090 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
6091 .await
6092 .unwrap();
6093 cx.executor().run_until_parked();
6094 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
6095 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
6096}
6097
6098#[gpui::test]
6099async fn test_dirty_buffer_reloads_after_undo(cx: &mut gpui::TestAppContext) {
6100 init_test(cx);
6101
6102 let fs = FakeFs::new(cx.executor());
6103 fs.insert_tree(
6104 path!("/dir"),
6105 json!({
6106 "file.txt": "version 1",
6107 }),
6108 )
6109 .await;
6110
6111 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6112 let buffer = project
6113 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx))
6114 .await
6115 .unwrap();
6116
6117 buffer.read_with(cx, |buffer, _| {
6118 assert_eq!(buffer.text(), "version 1");
6119 assert!(!buffer.is_dirty());
6120 });
6121
6122 // User makes an edit, making the buffer dirty.
6123 buffer.update(cx, |buffer, cx| {
6124 buffer.edit([(0..0, "user edit: ")], None, cx);
6125 });
6126
6127 buffer.read_with(cx, |buffer, _| {
6128 assert!(buffer.is_dirty());
6129 assert_eq!(buffer.text(), "user edit: version 1");
6130 });
6131
6132 // External tool writes new content while buffer is dirty.
6133 // file_updated() updates the File but suppresses ReloadNeeded.
6134 fs.save(
6135 path!("/dir/file.txt").as_ref(),
6136 &"version 2 from external tool".into(),
6137 Default::default(),
6138 )
6139 .await
6140 .unwrap();
6141 cx.executor().run_until_parked();
6142
6143 buffer.read_with(cx, |buffer, _| {
6144 assert!(buffer.has_conflict());
6145 assert_eq!(buffer.text(), "user edit: version 1");
6146 });
6147
6148 // User undoes their edit. Buffer becomes clean, but disk has different
6149 // content. did_edit() detects the dirty->clean transition and checks if
6150 // disk changed while dirty. Since mtime differs from saved_mtime, it
6151 // emits ReloadNeeded.
6152 buffer.update(cx, |buffer, cx| {
6153 buffer.undo(cx);
6154 });
6155 cx.executor().run_until_parked();
6156
6157 buffer.read_with(cx, |buffer, _| {
6158 assert_eq!(
6159 buffer.text(),
6160 "version 2 from external tool",
6161 "buffer should reload from disk after undo makes it clean"
6162 );
6163 assert!(!buffer.is_dirty());
6164 });
6165}
6166
// A clean buffer reloads when its file changes on disk, applying the change
// as a diff between old and new contents so that anchors keep pointing at the
// corresponding text. A dirty buffer does not reload; it is marked as
// conflicted instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The `ˇ` markers yield the offsets at which anchors are placed below.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Place an anchor at each marked offset.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // The buffer starts out clean and conflict-free.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk: delete a word, capitalize another, and extend a
    // third. The markers in the new text indicate where each anchor should end
    // up after the reload.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Each anchor moved with the text it was attached to.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
6249
6250#[gpui::test]
6251async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
6252 init_test(cx);
6253
6254 let fs = FakeFs::new(cx.executor());
6255 fs.insert_tree(
6256 path!("/dir"),
6257 json!({
6258 "file1": "a\nb\nc\n",
6259 "file2": "one\r\ntwo\r\nthree\r\n",
6260 }),
6261 )
6262 .await;
6263
6264 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6265 let buffer1 = project
6266 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
6267 .await
6268 .unwrap();
6269 let buffer2 = project
6270 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
6271 .await
6272 .unwrap();
6273
6274 buffer1.update(cx, |buffer, _| {
6275 assert_eq!(buffer.text(), "a\nb\nc\n");
6276 assert_eq!(buffer.line_ending(), LineEnding::Unix);
6277 });
6278 buffer2.update(cx, |buffer, _| {
6279 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
6280 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6281 });
6282
6283 // Change a file's line endings on disk from unix to windows. The buffer's
6284 // state updates correctly.
6285 fs.save(
6286 path!("/dir/file1").as_ref(),
6287 &"aaa\nb\nc\n".into(),
6288 LineEnding::Windows,
6289 )
6290 .await
6291 .unwrap();
6292 cx.executor().run_until_parked();
6293 buffer1.update(cx, |buffer, _| {
6294 assert_eq!(buffer.text(), "aaa\nb\nc\n");
6295 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6296 });
6297
6298 // Save a file with windows line endings. The file is written correctly.
6299 buffer2.update(cx, |buffer, cx| {
6300 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
6301 });
6302 project
6303 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
6304 .await
6305 .unwrap();
6306 assert_eq!(
6307 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
6308 "one\r\ntwo\r\nthree\r\nfour\r\n",
6309 );
6310}
6311
// LSP diagnostics whose `relatedInformation` entries reference one another
// are grouped: hints receive the group id of the primary diagnostic they
// point at. This test publishes one warning ("error 1") with one hint and one
// error ("error 2") with two hints, then checks both the flattened,
// position-ordered view and the two per-group views.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Five diagnostics: two primaries plus their hints, cross-linked via
    // `related_information`.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary of group 1: warning "error 1", linked to its hint.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint belonging to group 1, back-referencing its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary of group 0: error "error 2", linked to two hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint of group 0, back-referencing its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint of group 0, back-referencing its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Publish the diagnostics as if pushed from language server 0.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // Flattened view: all entries ordered by position, each carrying the
    // group id and primary flag derived from the related-information links.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" family — both hints and the primary, ordered by
    // position.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" family — the primary warning and its one hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6571
// Verifies that renaming a file via `Project::rename_entry` drives the LSP file
// operation protocol: the server receives `workspace/willRenameFiles` (and its
// returned workspace edit is requested), followed by a
// `workspace/didRenameFiles` notification, for servers whose registered file
// operation filters match the renamed path.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the fake server registers for both will/did-rename capabilities:
    // any `.rs` file, and any folder.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening the buffer starts the fake language server for "Rust".
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename *before* installing the willRenameFiles handler; the
    // request is answered once the handler below is registered.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the fake server returns from willRenameFiles; at the end we
    // assert this exact edit was captured via `resolved_workspace_edit`.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The will-rename request must describe the single
                    // old-path -> new-path transition being performed.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must also receive the
    // didRenameFiles notification with the same file transition.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6708
// Verifies symbol rename through LSP: `prepare_rename` resolves the renameable
// range from the server's prepareRename response, and `perform_rename` applies
// the server's multi-file workspace edit to the affected buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    // Advertise prepareRename support so prepare_rename goes
                    // through the server rather than a local fallback.
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Prepare a rename at offset 7, inside `ONE` in "const ONE: usize = 1;".
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            // Server says columns 6..9 (the identifier `ONE`) are renameable.
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server responds with edits touching both
    // the definition (one.rs) and its uses (two.rs).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The returned transaction maps each edited buffer to its transaction;
    // both files must have been modified.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6849
// Verifies project-wide text search: matches are reported per file with byte
// ranges, and unsaved in-memory buffer edits are searched in place of the
// on-disk contents.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive search for "TWO": matches the definition in two.rs and
    // the use in three.rs, but not lowercase occurrences.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so that it now contains "TWO";
    // the next search must pick up the dirty buffer contents.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
6926
// Verifies the `files_to_include` path-matcher argument of a text search:
// only files matching at least one inclusion glob are searched, and globs
// that match nothing are harmless.
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                // Inclusion glob that matches no file in the tree.
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                // One matching glob plus one that matches nothing.
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
7050
// Verifies the `files_to_exclude` path-matcher argument of a text search:
// files matching any exclusion glob are skipped, and globs that match nothing
// have no effect.
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                // Exclusion glob that matches no file in the tree.
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                // One matching glob plus one that matches nothing.
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                // Excluding both extensions leaves nothing to search.
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
7174
7175#[gpui::test]
7176async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
7177 init_test(cx);
7178
7179 let search_query = "file";
7180
7181 let fs = FakeFs::new(cx.executor());
7182 fs.insert_tree(
7183 path!("/dir"),
7184 json!({
7185 "one.rs": r#"// Rust file one"#,
7186 "one.ts": r#"// TypeScript file one"#,
7187 "two.rs": r#"// Rust file two"#,
7188 "two.ts": r#"// TypeScript file two"#,
7189 }),
7190 )
7191 .await;
7192
7193 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7194 let path_style = PathStyle::local();
7195 let _buffer = project.update(cx, |project, cx| {
7196 project.create_local_buffer("file", None, false, cx)
7197 });
7198
7199 assert_eq!(
7200 search(
7201 &project,
7202 SearchQuery::text(
7203 search_query,
7204 false,
7205 true,
7206 false,
7207 Default::default(),
7208 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
7209 false,
7210 None,
7211 )
7212 .unwrap(),
7213 cx
7214 )
7215 .await
7216 .unwrap(),
7217 HashMap::from_iter([
7218 (path!("dir/one.rs").to_string(), vec![8..12]),
7219 (path!("dir/one.ts").to_string(), vec![14..18]),
7220 (path!("dir/two.rs").to_string(), vec![8..12]),
7221 (path!("dir/two.ts").to_string(), vec![14..18]),
7222 ]),
7223 "If no exclusions match, all files should be returned"
7224 );
7225
7226 assert_eq!(
7227 search(
7228 &project,
7229 SearchQuery::text(
7230 search_query,
7231 false,
7232 true,
7233 false,
7234 Default::default(),
7235 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
7236 false,
7237 None,
7238 )
7239 .unwrap(),
7240 cx
7241 )
7242 .await
7243 .unwrap(),
7244 HashMap::from_iter([
7245 (path!("dir/one.ts").to_string(), vec![14..18]),
7246 (path!("dir/two.ts").to_string(), vec![14..18]),
7247 ]),
7248 "Rust exclusion search should give only TypeScript files"
7249 );
7250
7251 assert_eq!(
7252 search(
7253 &project,
7254 SearchQuery::text(
7255 search_query,
7256 false,
7257 true,
7258 false,
7259 Default::default(),
7260 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
7261 false,
7262 None,
7263 )
7264 .unwrap(),
7265 cx
7266 )
7267 .await
7268 .unwrap(),
7269 HashMap::from_iter([
7270 (path!("dir/one.rs").to_string(), vec![8..12]),
7271 (path!("dir/two.rs").to_string(), vec![8..12]),
7272 ]),
7273 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7274 );
7275
7276 assert!(
7277 search(
7278 &project,
7279 SearchQuery::text(
7280 search_query,
7281 false,
7282 true,
7283 false,
7284 Default::default(),
7285 PathMatcher::new(
7286 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7287 PathStyle::local(),
7288 )
7289 .unwrap(),
7290 false,
7291 None,
7292 )
7293 .unwrap(),
7294 cx
7295 )
7296 .await
7297 .unwrap()
7298 .is_empty(),
7299 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7300 );
7301}
7302
// Verifies the interaction of inclusion and exclusion path matchers in the
// same query: exclusions always win over inclusions, and only files that are
// included-but-not-excluded are returned.
#[gpui::test]
async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                // Identical inclusion and exclusion globs (matching nothing).
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both no exclusions and inclusions match, exclusions should win and return nothing"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                // Identical inclusion and exclusion globs that do match files.
                PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
                PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Non-matching inclusions and exclusions should not change that."
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                // Include TypeScript, exclude Rust: the sets don't intersect,
                // so the TypeScript files survive.
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
    );
}
7416
// Verifies inclusion matchers in a project with two worktrees: a glob prefixed
// with a worktree name restricts results to that worktree (when the
// match-full-paths flag is set), while a bare glob matches in all worktrees.
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                // Worktree-qualified inclusion glob; note the seventh argument
                // is `true` here (unlike the bare-glob query below).
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                // Unqualified glob: applies within every worktree.
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
7515
7516#[gpui::test]
7517async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
7518 init_test(cx);
7519
7520 let fs = FakeFs::new(cx.background_executor.clone());
7521 fs.insert_tree(
7522 path!("/dir"),
7523 json!({
7524 ".git": {},
7525 ".gitignore": "**/target\n/node_modules\n",
7526 "target": {
7527 "index.txt": "index_key:index_value"
7528 },
7529 "node_modules": {
7530 "eslint": {
7531 "index.ts": "const eslint_key = 'eslint value'",
7532 "package.json": r#"{ "some_key": "some value" }"#,
7533 },
7534 "prettier": {
7535 "index.ts": "const prettier_key = 'prettier value'",
7536 "package.json": r#"{ "other_key": "other value" }"#,
7537 },
7538 },
7539 "package.json": r#"{ "main_key": "main value" }"#,
7540 }),
7541 )
7542 .await;
7543 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7544
7545 let query = "key";
7546 assert_eq!(
7547 search(
7548 &project,
7549 SearchQuery::text(
7550 query,
7551 false,
7552 false,
7553 false,
7554 Default::default(),
7555 Default::default(),
7556 false,
7557 None,
7558 )
7559 .unwrap(),
7560 cx
7561 )
7562 .await
7563 .unwrap(),
7564 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
7565 "Only one non-ignored file should have the query"
7566 );
7567
7568 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7569 let path_style = PathStyle::local();
7570 assert_eq!(
7571 search(
7572 &project,
7573 SearchQuery::text(
7574 query,
7575 false,
7576 false,
7577 true,
7578 Default::default(),
7579 Default::default(),
7580 false,
7581 None,
7582 )
7583 .unwrap(),
7584 cx
7585 )
7586 .await
7587 .unwrap(),
7588 HashMap::from_iter([
7589 (path!("dir/package.json").to_string(), vec![8..11]),
7590 (path!("dir/target/index.txt").to_string(), vec![6..9]),
7591 (
7592 path!("dir/node_modules/prettier/package.json").to_string(),
7593 vec![9..12]
7594 ),
7595 (
7596 path!("dir/node_modules/prettier/index.ts").to_string(),
7597 vec![15..18]
7598 ),
7599 (
7600 path!("dir/node_modules/eslint/index.ts").to_string(),
7601 vec![13..16]
7602 ),
7603 (
7604 path!("dir/node_modules/eslint/package.json").to_string(),
7605 vec![8..11]
7606 ),
7607 ]),
7608 "Unrestricted search with ignored directories should find every file with the query"
7609 );
7610
7611 let files_to_include =
7612 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
7613 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
7614 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7615 assert_eq!(
7616 search(
7617 &project,
7618 SearchQuery::text(
7619 query,
7620 false,
7621 false,
7622 true,
7623 files_to_include,
7624 files_to_exclude,
7625 false,
7626 None,
7627 )
7628 .unwrap(),
7629 cx
7630 )
7631 .await
7632 .unwrap(),
7633 HashMap::from_iter([(
7634 path!("dir/node_modules/prettier/package.json").to_string(),
7635 vec![9..12]
7636 )]),
7637 "With search including ignored prettier directory and excluding TS files, only one file should be found"
7638 );
7639}
7640
// Verifies searching for non-ASCII (Cyrillic) text: a case-sensitive query
// stays a plain text search, while a case-insensitive one is compiled to a
// regex internally (per the assert_matches below); byte ranges account for
// multi-byte UTF-8 characters.
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // Case-sensitive unicode text stays a plain-text query.
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // Case-insensitive unicode text is converted into a regex query.
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A literal '.' in the query must not act as a regex wildcard even though
    // the case-insensitive query goes through the regex path.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7723
// Verifies that `Project::create_entry` creates an entry in the worktree and
// on the filesystem, including for an unusual name ending in ".." (which is a
// literal file name here, not a parent-directory traversal).
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The worktree root is the nested "three" directory, so the new entry
    // lands at /one/two/three/b..
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, rel_path("b..")), true, cx)
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();

    // The full filesystem tree must contain the new "b.." file and nothing
    // else must have changed.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );
}
7766
7767#[gpui::test]
7768async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
7769 init_test(cx);
7770
7771 let fs = FakeFs::new(cx.executor());
7772 fs.insert_tree(
7773 path!("/dir"),
7774 json!({
7775 "a.tsx": "a",
7776 }),
7777 )
7778 .await;
7779
7780 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7781
7782 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7783 language_registry.add(tsx_lang());
7784 let language_server_names = [
7785 "TypeScriptServer",
7786 "TailwindServer",
7787 "ESLintServer",
7788 "NoHoverCapabilitiesServer",
7789 ];
7790 let mut language_servers = [
7791 language_registry.register_fake_lsp(
7792 "tsx",
7793 FakeLspAdapter {
7794 name: language_server_names[0],
7795 capabilities: lsp::ServerCapabilities {
7796 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7797 ..lsp::ServerCapabilities::default()
7798 },
7799 ..FakeLspAdapter::default()
7800 },
7801 ),
7802 language_registry.register_fake_lsp(
7803 "tsx",
7804 FakeLspAdapter {
7805 name: language_server_names[1],
7806 capabilities: lsp::ServerCapabilities {
7807 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7808 ..lsp::ServerCapabilities::default()
7809 },
7810 ..FakeLspAdapter::default()
7811 },
7812 ),
7813 language_registry.register_fake_lsp(
7814 "tsx",
7815 FakeLspAdapter {
7816 name: language_server_names[2],
7817 capabilities: lsp::ServerCapabilities {
7818 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7819 ..lsp::ServerCapabilities::default()
7820 },
7821 ..FakeLspAdapter::default()
7822 },
7823 ),
7824 language_registry.register_fake_lsp(
7825 "tsx",
7826 FakeLspAdapter {
7827 name: language_server_names[3],
7828 capabilities: lsp::ServerCapabilities {
7829 hover_provider: None,
7830 ..lsp::ServerCapabilities::default()
7831 },
7832 ..FakeLspAdapter::default()
7833 },
7834 ),
7835 ];
7836
7837 let (buffer, _handle) = project
7838 .update(cx, |p, cx| {
7839 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7840 })
7841 .await
7842 .unwrap();
7843 cx.executor().run_until_parked();
7844
7845 let mut servers_with_hover_requests = HashMap::default();
7846 for i in 0..language_server_names.len() {
7847 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
7848 panic!(
7849 "Failed to get language server #{i} with name {}",
7850 &language_server_names[i]
7851 )
7852 });
7853 let new_server_name = new_server.server.name();
7854 assert!(
7855 !servers_with_hover_requests.contains_key(&new_server_name),
7856 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7857 );
7858 match new_server_name.as_ref() {
7859 "TailwindServer" | "TypeScriptServer" => {
7860 servers_with_hover_requests.insert(
7861 new_server_name.clone(),
7862 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7863 move |_, _| {
7864 let name = new_server_name.clone();
7865 async move {
7866 Ok(Some(lsp::Hover {
7867 contents: lsp::HoverContents::Scalar(
7868 lsp::MarkedString::String(format!("{name} hover")),
7869 ),
7870 range: None,
7871 }))
7872 }
7873 },
7874 ),
7875 );
7876 }
7877 "ESLintServer" => {
7878 servers_with_hover_requests.insert(
7879 new_server_name,
7880 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7881 |_, _| async move { Ok(None) },
7882 ),
7883 );
7884 }
7885 "NoHoverCapabilitiesServer" => {
7886 let _never_handled = new_server
7887 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
7888 panic!(
7889 "Should not call for hovers server with no corresponding capabilities"
7890 )
7891 });
7892 }
7893 unexpected => panic!("Unexpected server name: {unexpected}"),
7894 }
7895 }
7896
7897 let hover_task = project.update(cx, |project, cx| {
7898 project.hover(&buffer, Point::new(0, 0), cx)
7899 });
7900 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
7901 |mut hover_request| async move {
7902 hover_request
7903 .next()
7904 .await
7905 .expect("All hover requests should have been triggered")
7906 },
7907 ))
7908 .await;
7909 assert_eq!(
7910 vec!["TailwindServer hover", "TypeScriptServer hover"],
7911 hover_task
7912 .await
7913 .into_iter()
7914 .flatten()
7915 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7916 .sorted()
7917 .collect::<Vec<_>>(),
7918 "Should receive hover responses from all related servers with hover capabilities"
7919 );
7920}
7921
7922#[gpui::test]
7923async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
7924 init_test(cx);
7925
7926 let fs = FakeFs::new(cx.executor());
7927 fs.insert_tree(
7928 path!("/dir"),
7929 json!({
7930 "a.ts": "a",
7931 }),
7932 )
7933 .await;
7934
7935 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7936
7937 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7938 language_registry.add(typescript_lang());
7939 let mut fake_language_servers = language_registry.register_fake_lsp(
7940 "TypeScript",
7941 FakeLspAdapter {
7942 capabilities: lsp::ServerCapabilities {
7943 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7944 ..lsp::ServerCapabilities::default()
7945 },
7946 ..FakeLspAdapter::default()
7947 },
7948 );
7949
7950 let (buffer, _handle) = project
7951 .update(cx, |p, cx| {
7952 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7953 })
7954 .await
7955 .unwrap();
7956 cx.executor().run_until_parked();
7957
7958 let fake_server = fake_language_servers
7959 .next()
7960 .await
7961 .expect("failed to get the language server");
7962
7963 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7964 move |_, _| async move {
7965 Ok(Some(lsp::Hover {
7966 contents: lsp::HoverContents::Array(vec![
7967 lsp::MarkedString::String("".to_string()),
7968 lsp::MarkedString::String(" ".to_string()),
7969 lsp::MarkedString::String("\n\n\n".to_string()),
7970 ]),
7971 range: None,
7972 }))
7973 },
7974 );
7975
7976 let hover_task = project.update(cx, |project, cx| {
7977 project.hover(&buffer, Point::new(0, 0), cx)
7978 });
7979 let () = request_handled
7980 .next()
7981 .await
7982 .expect("All hover requests should have been triggered");
7983 assert_eq!(
7984 Vec::<String>::new(),
7985 hover_task
7986 .await
7987 .into_iter()
7988 .flatten()
7989 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7990 .sorted()
7991 .collect::<Vec<_>>(),
7992 "Empty hover parts should be ignored"
7993 );
7994}
7995
7996#[gpui::test]
7997async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
7998 init_test(cx);
7999
8000 let fs = FakeFs::new(cx.executor());
8001 fs.insert_tree(
8002 path!("/dir"),
8003 json!({
8004 "a.ts": "a",
8005 }),
8006 )
8007 .await;
8008
8009 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8010
8011 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8012 language_registry.add(typescript_lang());
8013 let mut fake_language_servers = language_registry.register_fake_lsp(
8014 "TypeScript",
8015 FakeLspAdapter {
8016 capabilities: lsp::ServerCapabilities {
8017 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8018 ..lsp::ServerCapabilities::default()
8019 },
8020 ..FakeLspAdapter::default()
8021 },
8022 );
8023
8024 let (buffer, _handle) = project
8025 .update(cx, |p, cx| {
8026 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
8027 })
8028 .await
8029 .unwrap();
8030 cx.executor().run_until_parked();
8031
8032 let fake_server = fake_language_servers
8033 .next()
8034 .await
8035 .expect("failed to get the language server");
8036
8037 let mut request_handled = fake_server
8038 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
8039 Ok(Some(vec![
8040 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
8041 title: "organize imports".to_string(),
8042 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
8043 ..lsp::CodeAction::default()
8044 }),
8045 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
8046 title: "fix code".to_string(),
8047 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
8048 ..lsp::CodeAction::default()
8049 }),
8050 ]))
8051 });
8052
8053 let code_actions_task = project.update(cx, |project, cx| {
8054 project.code_actions(
8055 &buffer,
8056 0..buffer.read(cx).len(),
8057 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
8058 cx,
8059 )
8060 });
8061
8062 let () = request_handled
8063 .next()
8064 .await
8065 .expect("The code action request should have been triggered");
8066
8067 let code_actions = code_actions_task.await.unwrap().unwrap();
8068 assert_eq!(code_actions.len(), 1);
8069 assert_eq!(
8070 code_actions[0].lsp_action.action_kind(),
8071 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
8072 );
8073}
8074
8075#[gpui::test]
8076async fn test_code_actions_without_requested_kinds_do_not_send_only_filter(
8077 cx: &mut gpui::TestAppContext,
8078) {
8079 init_test(cx);
8080
8081 let fs = FakeFs::new(cx.executor());
8082 fs.insert_tree(
8083 path!("/dir"),
8084 json!({
8085 "a.ts": "a",
8086 }),
8087 )
8088 .await;
8089
8090 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8091
8092 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8093 language_registry.add(typescript_lang());
8094 let mut fake_language_servers = language_registry.register_fake_lsp(
8095 "TypeScript",
8096 FakeLspAdapter {
8097 capabilities: lsp::ServerCapabilities {
8098 code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
8099 lsp::CodeActionOptions {
8100 code_action_kinds: Some(vec![
8101 CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
8102 "source.doc".into(),
8103 ]),
8104 ..lsp::CodeActionOptions::default()
8105 },
8106 )),
8107 ..lsp::ServerCapabilities::default()
8108 },
8109 ..FakeLspAdapter::default()
8110 },
8111 );
8112
8113 let (buffer, _handle) = project
8114 .update(cx, |p, cx| {
8115 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
8116 })
8117 .await
8118 .unwrap();
8119 cx.executor().run_until_parked();
8120
8121 let fake_server = fake_language_servers
8122 .next()
8123 .await
8124 .expect("failed to get the language server");
8125
8126 let mut request_handled = fake_server.set_request_handler::<
8127 lsp::request::CodeActionRequest,
8128 _,
8129 _,
8130 >(move |params, _| async move {
8131 assert_eq!(
8132 params.context.only, None,
8133 "Code action requests without explicit kind filters should not send `context.only`"
8134 );
8135 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8136 lsp::CodeAction {
8137 title: "Add test".to_string(),
8138 kind: Some("source.addTest".into()),
8139 ..lsp::CodeAction::default()
8140 },
8141 )]))
8142 });
8143
8144 let code_actions_task = project.update(cx, |project, cx| {
8145 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8146 });
8147
8148 let () = request_handled
8149 .next()
8150 .await
8151 .expect("The code action request should have been triggered");
8152
8153 let code_actions = code_actions_task.await.unwrap().unwrap();
8154 assert_eq!(code_actions.len(), 1);
8155 assert_eq!(
8156 code_actions[0].lsp_action.action_kind(),
8157 Some("source.addTest".into())
8158 );
8159}
8160
8161#[gpui::test]
8162async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
8163 init_test(cx);
8164
8165 let fs = FakeFs::new(cx.executor());
8166 fs.insert_tree(
8167 path!("/dir"),
8168 json!({
8169 "a.tsx": "a",
8170 }),
8171 )
8172 .await;
8173
8174 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8175
8176 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8177 language_registry.add(tsx_lang());
8178 let language_server_names = [
8179 "TypeScriptServer",
8180 "TailwindServer",
8181 "ESLintServer",
8182 "NoActionsCapabilitiesServer",
8183 ];
8184
8185 let mut language_server_rxs = [
8186 language_registry.register_fake_lsp(
8187 "tsx",
8188 FakeLspAdapter {
8189 name: language_server_names[0],
8190 capabilities: lsp::ServerCapabilities {
8191 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8192 ..lsp::ServerCapabilities::default()
8193 },
8194 ..FakeLspAdapter::default()
8195 },
8196 ),
8197 language_registry.register_fake_lsp(
8198 "tsx",
8199 FakeLspAdapter {
8200 name: language_server_names[1],
8201 capabilities: lsp::ServerCapabilities {
8202 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8203 ..lsp::ServerCapabilities::default()
8204 },
8205 ..FakeLspAdapter::default()
8206 },
8207 ),
8208 language_registry.register_fake_lsp(
8209 "tsx",
8210 FakeLspAdapter {
8211 name: language_server_names[2],
8212 capabilities: lsp::ServerCapabilities {
8213 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8214 ..lsp::ServerCapabilities::default()
8215 },
8216 ..FakeLspAdapter::default()
8217 },
8218 ),
8219 language_registry.register_fake_lsp(
8220 "tsx",
8221 FakeLspAdapter {
8222 name: language_server_names[3],
8223 capabilities: lsp::ServerCapabilities {
8224 code_action_provider: None,
8225 ..lsp::ServerCapabilities::default()
8226 },
8227 ..FakeLspAdapter::default()
8228 },
8229 ),
8230 ];
8231
8232 let (buffer, _handle) = project
8233 .update(cx, |p, cx| {
8234 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
8235 })
8236 .await
8237 .unwrap();
8238 cx.executor().run_until_parked();
8239
8240 let mut servers_with_actions_requests = HashMap::default();
8241 for i in 0..language_server_names.len() {
8242 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
8243 panic!(
8244 "Failed to get language server #{i} with name {}",
8245 &language_server_names[i]
8246 )
8247 });
8248 let new_server_name = new_server.server.name();
8249
8250 assert!(
8251 !servers_with_actions_requests.contains_key(&new_server_name),
8252 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
8253 );
8254 match new_server_name.0.as_ref() {
8255 "TailwindServer" | "TypeScriptServer" => {
8256 servers_with_actions_requests.insert(
8257 new_server_name.clone(),
8258 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8259 move |_, _| {
8260 let name = new_server_name.clone();
8261 async move {
8262 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8263 lsp::CodeAction {
8264 title: format!("{name} code action"),
8265 ..lsp::CodeAction::default()
8266 },
8267 )]))
8268 }
8269 },
8270 ),
8271 );
8272 }
8273 "ESLintServer" => {
8274 servers_with_actions_requests.insert(
8275 new_server_name,
8276 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8277 |_, _| async move { Ok(None) },
8278 ),
8279 );
8280 }
8281 "NoActionsCapabilitiesServer" => {
8282 let _never_handled = new_server
8283 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
8284 panic!(
8285 "Should not call for code actions server with no corresponding capabilities"
8286 )
8287 });
8288 }
8289 unexpected => panic!("Unexpected server name: {unexpected}"),
8290 }
8291 }
8292
8293 let code_actions_task = project.update(cx, |project, cx| {
8294 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8295 });
8296
8297 // cx.run_until_parked();
8298 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
8299 |mut code_actions_request| async move {
8300 code_actions_request
8301 .next()
8302 .await
8303 .expect("All code actions requests should have been triggered")
8304 },
8305 ))
8306 .await;
8307 assert_eq!(
8308 vec!["TailwindServer code action", "TypeScriptServer code action"],
8309 code_actions_task
8310 .await
8311 .unwrap()
8312 .unwrap()
8313 .into_iter()
8314 .map(|code_action| code_action.lsp_action.title().to_owned())
8315 .sorted()
8316 .collect::<Vec<_>>(),
8317 "Should receive code actions responses from all related servers with hover capabilities"
8318 );
8319}
8320
8321#[gpui::test]
8322async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
8323 init_test(cx);
8324
8325 let fs = FakeFs::new(cx.executor());
8326 fs.insert_tree(
8327 "/dir",
8328 json!({
8329 "a.rs": "let a = 1;",
8330 "b.rs": "let b = 2;",
8331 "c.rs": "let c = 2;",
8332 }),
8333 )
8334 .await;
8335
8336 let project = Project::test(
8337 fs,
8338 [
8339 "/dir/a.rs".as_ref(),
8340 "/dir/b.rs".as_ref(),
8341 "/dir/c.rs".as_ref(),
8342 ],
8343 cx,
8344 )
8345 .await;
8346
8347 // check the initial state and get the worktrees
8348 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
8349 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8350 assert_eq!(worktrees.len(), 3);
8351
8352 let worktree_a = worktrees[0].read(cx);
8353 let worktree_b = worktrees[1].read(cx);
8354 let worktree_c = worktrees[2].read(cx);
8355
8356 // check they start in the right order
8357 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
8358 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
8359 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
8360
8361 (
8362 worktrees[0].clone(),
8363 worktrees[1].clone(),
8364 worktrees[2].clone(),
8365 )
8366 });
8367
8368 // move first worktree to after the second
8369 // [a, b, c] -> [b, a, c]
8370 project
8371 .update(cx, |project, cx| {
8372 let first = worktree_a.read(cx);
8373 let second = worktree_b.read(cx);
8374 project.move_worktree(first.id(), second.id(), cx)
8375 })
8376 .expect("moving first after second");
8377
8378 // check the state after moving
8379 project.update(cx, |project, cx| {
8380 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8381 assert_eq!(worktrees.len(), 3);
8382
8383 let first = worktrees[0].read(cx);
8384 let second = worktrees[1].read(cx);
8385 let third = worktrees[2].read(cx);
8386
8387 // check they are now in the right order
8388 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8389 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
8390 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8391 });
8392
8393 // move the second worktree to before the first
8394 // [b, a, c] -> [a, b, c]
8395 project
8396 .update(cx, |project, cx| {
8397 let second = worktree_a.read(cx);
8398 let first = worktree_b.read(cx);
8399 project.move_worktree(first.id(), second.id(), cx)
8400 })
8401 .expect("moving second before first");
8402
8403 // check the state after moving
8404 project.update(cx, |project, cx| {
8405 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8406 assert_eq!(worktrees.len(), 3);
8407
8408 let first = worktrees[0].read(cx);
8409 let second = worktrees[1].read(cx);
8410 let third = worktrees[2].read(cx);
8411
8412 // check they are now in the right order
8413 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8414 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8415 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8416 });
8417
8418 // move the second worktree to after the third
8419 // [a, b, c] -> [a, c, b]
8420 project
8421 .update(cx, |project, cx| {
8422 let second = worktree_b.read(cx);
8423 let third = worktree_c.read(cx);
8424 project.move_worktree(second.id(), third.id(), cx)
8425 })
8426 .expect("moving second after third");
8427
8428 // check the state after moving
8429 project.update(cx, |project, cx| {
8430 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8431 assert_eq!(worktrees.len(), 3);
8432
8433 let first = worktrees[0].read(cx);
8434 let second = worktrees[1].read(cx);
8435 let third = worktrees[2].read(cx);
8436
8437 // check they are now in the right order
8438 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8439 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8440 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
8441 });
8442
8443 // move the third worktree to before the second
8444 // [a, c, b] -> [a, b, c]
8445 project
8446 .update(cx, |project, cx| {
8447 let third = worktree_c.read(cx);
8448 let second = worktree_b.read(cx);
8449 project.move_worktree(third.id(), second.id(), cx)
8450 })
8451 .expect("moving third before second");
8452
8453 // check the state after moving
8454 project.update(cx, |project, cx| {
8455 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8456 assert_eq!(worktrees.len(), 3);
8457
8458 let first = worktrees[0].read(cx);
8459 let second = worktrees[1].read(cx);
8460 let third = worktrees[2].read(cx);
8461
8462 // check they are now in the right order
8463 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8464 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8465 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8466 });
8467
8468 // move the first worktree to after the third
8469 // [a, b, c] -> [b, c, a]
8470 project
8471 .update(cx, |project, cx| {
8472 let first = worktree_a.read(cx);
8473 let third = worktree_c.read(cx);
8474 project.move_worktree(first.id(), third.id(), cx)
8475 })
8476 .expect("moving first after third");
8477
8478 // check the state after moving
8479 project.update(cx, |project, cx| {
8480 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8481 assert_eq!(worktrees.len(), 3);
8482
8483 let first = worktrees[0].read(cx);
8484 let second = worktrees[1].read(cx);
8485 let third = worktrees[2].read(cx);
8486
8487 // check they are now in the right order
8488 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8489 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8490 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
8491 });
8492
8493 // move the third worktree to before the first
8494 // [b, c, a] -> [a, b, c]
8495 project
8496 .update(cx, |project, cx| {
8497 let third = worktree_a.read(cx);
8498 let first = worktree_b.read(cx);
8499 project.move_worktree(third.id(), first.id(), cx)
8500 })
8501 .expect("moving third before first");
8502
8503 // check the state after moving
8504 project.update(cx, |project, cx| {
8505 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8506 assert_eq!(worktrees.len(), 3);
8507
8508 let first = worktrees[0].read(cx);
8509 let second = worktrees[1].read(cx);
8510 let third = worktrees[2].read(cx);
8511
8512 // check they are now in the right order
8513 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8514 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8515 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8516 });
8517}
8518
8519#[gpui::test]
8520async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
8521 init_test(cx);
8522
8523 let staged_contents = r#"
8524 fn main() {
8525 println!("hello world");
8526 }
8527 "#
8528 .unindent();
8529 let file_contents = r#"
8530 // print goodbye
8531 fn main() {
8532 println!("goodbye world");
8533 }
8534 "#
8535 .unindent();
8536
8537 let fs = FakeFs::new(cx.background_executor.clone());
8538 fs.insert_tree(
8539 "/dir",
8540 json!({
8541 ".git": {},
8542 "src": {
8543 "main.rs": file_contents,
8544 }
8545 }),
8546 )
8547 .await;
8548
8549 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
8550
8551 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8552
8553 let buffer = project
8554 .update(cx, |project, cx| {
8555 project.open_local_buffer("/dir/src/main.rs", cx)
8556 })
8557 .await
8558 .unwrap();
8559 let unstaged_diff = project
8560 .update(cx, |project, cx| {
8561 project.open_unstaged_diff(buffer.clone(), cx)
8562 })
8563 .await
8564 .unwrap();
8565
8566 cx.run_until_parked();
8567 unstaged_diff.update(cx, |unstaged_diff, cx| {
8568 let snapshot = buffer.read(cx).snapshot();
8569 assert_hunks(
8570 unstaged_diff.snapshot(cx).hunks(&snapshot),
8571 &snapshot,
8572 &unstaged_diff.base_text_string(cx).unwrap(),
8573 &[
8574 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
8575 (
8576 2..3,
8577 " println!(\"hello world\");\n",
8578 " println!(\"goodbye world\");\n",
8579 DiffHunkStatus::modified_none(),
8580 ),
8581 ],
8582 );
8583 });
8584
8585 let staged_contents = r#"
8586 // print goodbye
8587 fn main() {
8588 }
8589 "#
8590 .unindent();
8591
8592 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
8593
8594 cx.run_until_parked();
8595 unstaged_diff.update(cx, |unstaged_diff, cx| {
8596 let snapshot = buffer.read(cx).snapshot();
8597 assert_hunks(
8598 unstaged_diff
8599 .snapshot(cx)
8600 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
8601 &snapshot,
8602 &unstaged_diff.base_text(cx).text(),
8603 &[(
8604 2..3,
8605 "",
8606 " println!(\"goodbye world\");\n",
8607 DiffHunkStatus::added_none(),
8608 )],
8609 );
8610 });
8611}
8612
// Exercises the uncommitted diff (working copy vs. HEAD) for a modified file
// and for a deleted file, including how hunk secondary statuses react when
// HEAD and the index change underneath the open diff.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of `modification.rs`: HEAD commit, index (staged), and
    // the working copy on disk.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index but not on disk, i.e. an
    // as-yet-unstaged deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language (Rust).
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // Against HEAD: the added comment line is not in the index yet (it keeps a
    // secondary hunk), while the println change matches the index already.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is still present in the index, so the deleted hunk keeps a
    // secondary (unstaged) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once `deletion.rs` is gone from the index, the hunk is fully staged and
    // loses its secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8796
8797#[gpui::test]
8798async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
8799 use DiffHunkSecondaryStatus::*;
8800 init_test(cx);
8801
8802 let committed_contents = r#"
8803 zero
8804 one
8805 two
8806 three
8807 four
8808 five
8809 "#
8810 .unindent();
8811 let file_contents = r#"
8812 one
8813 TWO
8814 three
8815 FOUR
8816 five
8817 "#
8818 .unindent();
8819
8820 let fs = FakeFs::new(cx.background_executor.clone());
8821 fs.insert_tree(
8822 "/dir",
8823 json!({
8824 ".git": {},
8825 "file.txt": file_contents.clone()
8826 }),
8827 )
8828 .await;
8829
8830 fs.set_head_and_index_for_repo(
8831 path!("/dir/.git").as_ref(),
8832 &[("file.txt", committed_contents.clone())],
8833 );
8834
8835 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8836
8837 let buffer = project
8838 .update(cx, |project, cx| {
8839 project.open_local_buffer("/dir/file.txt", cx)
8840 })
8841 .await
8842 .unwrap();
8843 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8844 let uncommitted_diff = project
8845 .update(cx, |project, cx| {
8846 project.open_uncommitted_diff(buffer.clone(), cx)
8847 })
8848 .await
8849 .unwrap();
8850 let mut diff_events = cx.events(&uncommitted_diff);
8851
8852 // The hunks are initially unstaged.
8853 uncommitted_diff.read_with(cx, |diff, cx| {
8854 assert_hunks(
8855 diff.snapshot(cx).hunks(&snapshot),
8856 &snapshot,
8857 &diff.base_text_string(cx).unwrap(),
8858 &[
8859 (
8860 0..0,
8861 "zero\n",
8862 "",
8863 DiffHunkStatus::deleted(HasSecondaryHunk),
8864 ),
8865 (
8866 1..2,
8867 "two\n",
8868 "TWO\n",
8869 DiffHunkStatus::modified(HasSecondaryHunk),
8870 ),
8871 (
8872 3..4,
8873 "four\n",
8874 "FOUR\n",
8875 DiffHunkStatus::modified(HasSecondaryHunk),
8876 ),
8877 ],
8878 );
8879 });
8880
8881 // Stage a hunk. It appears as optimistically staged.
8882 uncommitted_diff.update(cx, |diff, cx| {
8883 let range =
8884 snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
8885 let hunks = diff
8886 .snapshot(cx)
8887 .hunks_intersecting_range(range, &snapshot)
8888 .collect::<Vec<_>>();
8889 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
8890
8891 assert_hunks(
8892 diff.snapshot(cx).hunks(&snapshot),
8893 &snapshot,
8894 &diff.base_text_string(cx).unwrap(),
8895 &[
8896 (
8897 0..0,
8898 "zero\n",
8899 "",
8900 DiffHunkStatus::deleted(HasSecondaryHunk),
8901 ),
8902 (
8903 1..2,
8904 "two\n",
8905 "TWO\n",
8906 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
8907 ),
8908 (
8909 3..4,
8910 "four\n",
8911 "FOUR\n",
8912 DiffHunkStatus::modified(HasSecondaryHunk),
8913 ),
8914 ],
8915 );
8916 });
8917
8918 // The diff emits a change event for the range of the staged hunk.
8919 assert!(matches!(
8920 diff_events.next().await.unwrap(),
8921 BufferDiffEvent::HunksStagedOrUnstaged(_)
8922 ));
8923 let event = diff_events.next().await.unwrap();
8924 if let BufferDiffEvent::DiffChanged(DiffChanged {
8925 changed_range: Some(changed_range),
8926 base_text_changed_range: _,
8927 extended_range: _,
8928 }) = event
8929 {
8930 let changed_range = changed_range.to_point(&snapshot);
8931 assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
8932 } else {
8933 panic!("Unexpected event {event:?}");
8934 }
8935
8936 // When the write to the index completes, it appears as staged.
8937 cx.run_until_parked();
8938 uncommitted_diff.update(cx, |diff, cx| {
8939 assert_hunks(
8940 diff.snapshot(cx).hunks(&snapshot),
8941 &snapshot,
8942 &diff.base_text_string(cx).unwrap(),
8943 &[
8944 (
8945 0..0,
8946 "zero\n",
8947 "",
8948 DiffHunkStatus::deleted(HasSecondaryHunk),
8949 ),
8950 (
8951 1..2,
8952 "two\n",
8953 "TWO\n",
8954 DiffHunkStatus::modified(NoSecondaryHunk),
8955 ),
8956 (
8957 3..4,
8958 "four\n",
8959 "FOUR\n",
8960 DiffHunkStatus::modified(HasSecondaryHunk),
8961 ),
8962 ],
8963 );
8964 });
8965
8966 // The diff emits a change event for the changed index text.
8967 let event = diff_events.next().await.unwrap();
8968 if let BufferDiffEvent::DiffChanged(DiffChanged {
8969 changed_range: Some(changed_range),
8970 base_text_changed_range: _,
8971 extended_range: _,
8972 }) = event
8973 {
8974 let changed_range = changed_range.to_point(&snapshot);
8975 assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
8976 } else {
8977 panic!("Unexpected event {event:?}");
8978 }
8979
8980 // Simulate a problem writing to the git index.
8981 fs.set_error_message_for_index_write(
8982 "/dir/.git".as_ref(),
8983 Some("failed to write git index".into()),
8984 );
8985
8986 // Stage another hunk.
8987 uncommitted_diff.update(cx, |diff, cx| {
8988 let range =
8989 snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
8990 let hunks = diff
8991 .snapshot(cx)
8992 .hunks_intersecting_range(range, &snapshot)
8993 .collect::<Vec<_>>();
8994 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
8995
8996 assert_hunks(
8997 diff.snapshot(cx).hunks(&snapshot),
8998 &snapshot,
8999 &diff.base_text_string(cx).unwrap(),
9000 &[
9001 (
9002 0..0,
9003 "zero\n",
9004 "",
9005 DiffHunkStatus::deleted(HasSecondaryHunk),
9006 ),
9007 (
9008 1..2,
9009 "two\n",
9010 "TWO\n",
9011 DiffHunkStatus::modified(NoSecondaryHunk),
9012 ),
9013 (
9014 3..4,
9015 "four\n",
9016 "FOUR\n",
9017 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
9018 ),
9019 ],
9020 );
9021 });
9022 assert!(matches!(
9023 diff_events.next().await.unwrap(),
9024 BufferDiffEvent::HunksStagedOrUnstaged(_)
9025 ));
9026 let event = diff_events.next().await.unwrap();
9027 if let BufferDiffEvent::DiffChanged(DiffChanged {
9028 changed_range: Some(changed_range),
9029 base_text_changed_range: _,
9030 extended_range: _,
9031 }) = event
9032 {
9033 let changed_range = changed_range.to_point(&snapshot);
9034 assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
9035 } else {
9036 panic!("Unexpected event {event:?}");
9037 }
9038
9039 // When the write fails, the hunk returns to being unstaged.
9040 cx.run_until_parked();
9041 uncommitted_diff.update(cx, |diff, cx| {
9042 assert_hunks(
9043 diff.snapshot(cx).hunks(&snapshot),
9044 &snapshot,
9045 &diff.base_text_string(cx).unwrap(),
9046 &[
9047 (
9048 0..0,
9049 "zero\n",
9050 "",
9051 DiffHunkStatus::deleted(HasSecondaryHunk),
9052 ),
9053 (
9054 1..2,
9055 "two\n",
9056 "TWO\n",
9057 DiffHunkStatus::modified(NoSecondaryHunk),
9058 ),
9059 (
9060 3..4,
9061 "four\n",
9062 "FOUR\n",
9063 DiffHunkStatus::modified(HasSecondaryHunk),
9064 ),
9065 ],
9066 );
9067 });
9068
9069 let event = diff_events.next().await.unwrap();
9070 if let BufferDiffEvent::DiffChanged(DiffChanged {
9071 changed_range: Some(changed_range),
9072 base_text_changed_range: _,
9073 extended_range: _,
9074 }) = event
9075 {
9076 let changed_range = changed_range.to_point(&snapshot);
9077 assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
9078 } else {
9079 panic!("Unexpected event {event:?}");
9080 }
9081
9082 // Allow writing to the git index to succeed again.
9083 fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);
9084
9085 // Stage two hunks with separate operations.
9086 uncommitted_diff.update(cx, |diff, cx| {
9087 let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
9088 diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
9089 diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
9090 });
9091
9092 // Both staged hunks appear as pending.
9093 uncommitted_diff.update(cx, |diff, cx| {
9094 assert_hunks(
9095 diff.snapshot(cx).hunks(&snapshot),
9096 &snapshot,
9097 &diff.base_text_string(cx).unwrap(),
9098 &[
9099 (
9100 0..0,
9101 "zero\n",
9102 "",
9103 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
9104 ),
9105 (
9106 1..2,
9107 "two\n",
9108 "TWO\n",
9109 DiffHunkStatus::modified(NoSecondaryHunk),
9110 ),
9111 (
9112 3..4,
9113 "four\n",
9114 "FOUR\n",
9115 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
9116 ),
9117 ],
9118 );
9119 });
9120
9121 // Both staging operations take effect.
9122 cx.run_until_parked();
9123 uncommitted_diff.update(cx, |diff, cx| {
9124 assert_hunks(
9125 diff.snapshot(cx).hunks(&snapshot),
9126 &snapshot,
9127 &diff.base_text_string(cx).unwrap(),
9128 &[
9129 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
9130 (
9131 1..2,
9132 "two\n",
9133 "TWO\n",
9134 DiffHunkStatus::modified(NoSecondaryHunk),
9135 ),
9136 (
9137 3..4,
9138 "four\n",
9139 "FOUR\n",
9140 DiffHunkStatus::modified(NoSecondaryHunk),
9141 ),
9142 ],
9143 );
9144 });
9145}
9146
9147#[gpui::test(seeds(340, 472))]
9148async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
9149 use DiffHunkSecondaryStatus::*;
9150 init_test(cx);
9151
9152 let committed_contents = r#"
9153 zero
9154 one
9155 two
9156 three
9157 four
9158 five
9159 "#
9160 .unindent();
9161 let file_contents = r#"
9162 one
9163 TWO
9164 three
9165 FOUR
9166 five
9167 "#
9168 .unindent();
9169
9170 let fs = FakeFs::new(cx.background_executor.clone());
9171 fs.insert_tree(
9172 "/dir",
9173 json!({
9174 ".git": {},
9175 "file.txt": file_contents.clone()
9176 }),
9177 )
9178 .await;
9179
9180 fs.set_head_for_repo(
9181 "/dir/.git".as_ref(),
9182 &[("file.txt", committed_contents.clone())],
9183 "deadbeef",
9184 );
9185 fs.set_index_for_repo(
9186 "/dir/.git".as_ref(),
9187 &[("file.txt", committed_contents.clone())],
9188 );
9189
9190 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
9191
9192 let buffer = project
9193 .update(cx, |project, cx| {
9194 project.open_local_buffer("/dir/file.txt", cx)
9195 })
9196 .await
9197 .unwrap();
9198 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
9199 let uncommitted_diff = project
9200 .update(cx, |project, cx| {
9201 project.open_uncommitted_diff(buffer.clone(), cx)
9202 })
9203 .await
9204 .unwrap();
9205
9206 // The hunks are initially unstaged.
9207 uncommitted_diff.read_with(cx, |diff, cx| {
9208 assert_hunks(
9209 diff.snapshot(cx).hunks(&snapshot),
9210 &snapshot,
9211 &diff.base_text_string(cx).unwrap(),
9212 &[
9213 (
9214 0..0,
9215 "zero\n",
9216 "",
9217 DiffHunkStatus::deleted(HasSecondaryHunk),
9218 ),
9219 (
9220 1..2,
9221 "two\n",
9222 "TWO\n",
9223 DiffHunkStatus::modified(HasSecondaryHunk),
9224 ),
9225 (
9226 3..4,
9227 "four\n",
9228 "FOUR\n",
9229 DiffHunkStatus::modified(HasSecondaryHunk),
9230 ),
9231 ],
9232 );
9233 });
9234
9235 // Pause IO events
9236 fs.pause_events();
9237
9238 // Stage the first hunk.
9239 uncommitted_diff.update(cx, |diff, cx| {
9240 let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
9241 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
9242 assert_hunks(
9243 diff.snapshot(cx).hunks(&snapshot),
9244 &snapshot,
9245 &diff.base_text_string(cx).unwrap(),
9246 &[
9247 (
9248 0..0,
9249 "zero\n",
9250 "",
9251 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
9252 ),
9253 (
9254 1..2,
9255 "two\n",
9256 "TWO\n",
9257 DiffHunkStatus::modified(HasSecondaryHunk),
9258 ),
9259 (
9260 3..4,
9261 "four\n",
9262 "FOUR\n",
9263 DiffHunkStatus::modified(HasSecondaryHunk),
9264 ),
9265 ],
9266 );
9267 });
9268
9269 // Stage the second hunk *before* receiving the FS event for the first hunk.
9270 cx.run_until_parked();
9271 uncommitted_diff.update(cx, |diff, cx| {
9272 let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
9273 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
9274 assert_hunks(
9275 diff.snapshot(cx).hunks(&snapshot),
9276 &snapshot,
9277 &diff.base_text_string(cx).unwrap(),
9278 &[
9279 (
9280 0..0,
9281 "zero\n",
9282 "",
9283 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
9284 ),
9285 (
9286 1..2,
9287 "two\n",
9288 "TWO\n",
9289 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
9290 ),
9291 (
9292 3..4,
9293 "four\n",
9294 "FOUR\n",
9295 DiffHunkStatus::modified(HasSecondaryHunk),
9296 ),
9297 ],
9298 );
9299 });
9300
9301 // Process the FS event for staging the first hunk (second event is still pending).
9302 fs.flush_events(1);
9303 cx.run_until_parked();
9304
9305 // Stage the third hunk before receiving the second FS event.
9306 uncommitted_diff.update(cx, |diff, cx| {
9307 let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
9308 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
9309 });
9310
9311 // Wait for all remaining IO.
9312 cx.run_until_parked();
9313 fs.flush_events(fs.buffered_event_count());
9314
9315 // Now all hunks are staged.
9316 cx.run_until_parked();
9317 uncommitted_diff.update(cx, |diff, cx| {
9318 assert_hunks(
9319 diff.snapshot(cx).hunks(&snapshot),
9320 &snapshot,
9321 &diff.base_text_string(cx).unwrap(),
9322 &[
9323 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
9324 (
9325 1..2,
9326 "two\n",
9327 "TWO\n",
9328 DiffHunkStatus::modified(NoSecondaryHunk),
9329 ),
9330 (
9331 3..4,
9332 "four\n",
9333 "FOUR\n",
9334 DiffHunkStatus::modified(NoSecondaryHunk),
9335 ),
9336 ],
9337 );
9338 });
9339}
9340
// Fuzz test: performs a random sequence of hunk stage/unstage operations,
// mirrors the expected pending state in a local model, and checks that the
// diff's secondary statuses settle to the expected values once all IO
// completes.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Operation count; override with the `OPERATIONS` env var when debugging.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Modify every fifth line of a 30-line file, producing six separate hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of expected state for the final check.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk's staged state and record the resulting pending
        // status in the local model.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Let concurrent IO make partial progress between operations.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, pending statuses resolve to their targets.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    // Compare (row, secondary_status) pairs against the local model.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
9460
9461#[gpui::test]
9462async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
9463 init_test(cx);
9464
9465 let committed_contents = r#"
9466 fn main() {
9467 println!("hello from HEAD");
9468 }
9469 "#
9470 .unindent();
9471 let file_contents = r#"
9472 fn main() {
9473 println!("hello from the working copy");
9474 }
9475 "#
9476 .unindent();
9477
9478 let fs = FakeFs::new(cx.background_executor.clone());
9479 fs.insert_tree(
9480 "/dir",
9481 json!({
9482 ".git": {},
9483 "src": {
9484 "main.rs": file_contents,
9485 }
9486 }),
9487 )
9488 .await;
9489
9490 fs.set_head_for_repo(
9491 Path::new("/dir/.git"),
9492 &[("src/main.rs", committed_contents.clone())],
9493 "deadbeef",
9494 );
9495 fs.set_index_for_repo(
9496 Path::new("/dir/.git"),
9497 &[("src/main.rs", committed_contents.clone())],
9498 );
9499
9500 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
9501
9502 let buffer = project
9503 .update(cx, |project, cx| {
9504 project.open_local_buffer("/dir/src/main.rs", cx)
9505 })
9506 .await
9507 .unwrap();
9508 let uncommitted_diff = project
9509 .update(cx, |project, cx| {
9510 project.open_uncommitted_diff(buffer.clone(), cx)
9511 })
9512 .await
9513 .unwrap();
9514
9515 cx.run_until_parked();
9516 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
9517 let snapshot = buffer.read(cx).snapshot();
9518 assert_hunks(
9519 uncommitted_diff.snapshot(cx).hunks(&snapshot),
9520 &snapshot,
9521 &uncommitted_diff.base_text_string(cx).unwrap(),
9522 &[(
9523 1..2,
9524 " println!(\"hello from HEAD\");\n",
9525 " println!(\"hello from the working copy\");\n",
9526 DiffHunkStatus {
9527 kind: DiffHunkStatusKind::Modified,
9528 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
9529 },
9530 )],
9531 );
9532 });
9533}
9534
// TODO: Should we test this on Windows also?
// Regression test: staging a hunk must preserve the file's executable bit
// (index mode 100755) rather than resetting it to 100644.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Uses a real git repository on disk, so blocking is allowed.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` with the executable bit set, then modify it on disk so
    // there is a hunk to stage.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk of the modified file.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // A mode regression would appear as "new mode 100644" in the staged diff.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check via the index: the entry should still be mode 100755.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
9619
// Verifies repository/path resolution for project paths with nested
// repositories: files outside any repo resolve to `None`, and files inside a
// nested repo resolve to the innermost repository's work directory.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project-relative path, expected (repo work dir, repo-relative path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer repository's `.git` directory should drop the
    // association for files that belonged to it.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
9709
9710#[gpui::test]
9711async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
9712 init_test(cx);
9713 let fs = FakeFs::new(cx.background_executor.clone());
9714 let home = paths::home_dir();
9715 fs.insert_tree(
9716 home,
9717 json!({
9718 ".git": {},
9719 "project": {
9720 "a.txt": "A"
9721 },
9722 }),
9723 )
9724 .await;
9725
9726 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
9727 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9728 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9729
9730 project
9731 .update(cx, |project, cx| project.git_scans_complete(cx))
9732 .await;
9733 tree.flush_fs_events(cx).await;
9734
9735 project.read_with(cx, |project, cx| {
9736 let containing = project
9737 .git_store()
9738 .read(cx)
9739 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
9740 assert!(containing.is_none());
9741 });
9742
9743 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
9744 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9745 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9746 project
9747 .update(cx, |project, cx| project.git_scans_complete(cx))
9748 .await;
9749 tree.flush_fs_events(cx).await;
9750
9751 project.read_with(cx, |project, cx| {
9752 let containing = project
9753 .git_store()
9754 .read(cx)
9755 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
9756 assert_eq!(
9757 containing
9758 .unwrap()
9759 .0
9760 .read(cx)
9761 .work_directory_abs_path
9762 .as_ref(),
9763 home,
9764 );
9765 });
9766}
9767
// End-to-end check of cached git status against a real on-disk repository:
// initial scan, modifying a clean file, committing, and deleting both a
// tracked and an untracked file.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real git repository on disk, so blocking is allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Modify a file that was previously unchanged; its entry should appear.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Commit the modifications and the deletion of d.txt.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
                diff_stat: Some(DiffStat {
                    added: 0,
                    deleted: 1,
                }),
            }]
        );
    });
}
9923
// Exercises status postprocessing: a file deleted in the index but present
// on disk gets a combined `DA` status, and a nested repository's directory is
// excluded from the outer repository's statuses. Currently `#[ignore]`d.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real git repository on disk, so blocking is allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Select the outer ("project") repository, not the nested one.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
                diff_stat: None,
            }]
        );
    });
}
9988
9989#[track_caller]
9990/// We merge lhs into rhs.
9991fn merge_pending_ops_snapshots(
9992 source: Vec<pending_op::PendingOps>,
9993 mut target: Vec<pending_op::PendingOps>,
9994) -> Vec<pending_op::PendingOps> {
9995 for s_ops in source {
9996 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9997 if ops.repo_path == s_ops.repo_path {
9998 Some(idx)
9999 } else {
10000 None
10001 }
10002 }) {
10003 let t_ops = &mut target[idx];
10004 for s_op in s_ops.ops {
10005 if let Some(op_idx) = t_ops
10006 .ops
10007 .iter()
10008 .zip(0..)
10009 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
10010 {
10011 let t_op = &mut t_ops.ops[op_idx];
10012 match (s_op.job_status, t_op.job_status) {
10013 (pending_op::JobStatus::Running, _) => {}
10014 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
10015 (s_st, t_st) if s_st == t_st => {}
10016 _ => unreachable!(),
10017 }
10018 } else {
10019 t_ops.ops.push(s_op);
10020 }
10021 }
10022 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
10023 } else {
10024 target.push(s_ops);
10025 }
10026 }
10027 target
10028}
10029
// Exercises pending-op bookkeeping for repeated stage/unstage requests on a
// single entry: each request is observed as Running while in flight, then
// Finished, and the accumulated event stream records every operation.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot so
    // the full operation history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Expected id for the next operation; ids appear to increase monotonically.
    let mut id = 1u16;

    // Issues a stage or unstage request, asserting the op is Running while
    // the returned task is in flight and Finished once it resolves.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate staging and unstaging the same entry five times.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event history contains every op, all Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The cached status reflects the last operation (staged, i.e. Added).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10194
// When a second, identical stage request for the same entry is issued while
// the first is still pending, the final history shows op 1 as Skipped and
// only op 2 as Finished.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot so
    // the full operation history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage request, detached so it stays pending in the background.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second, identical request; await its completion (with a timeout so a
    // hang fails the test rather than stalling it).
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was skipped in favor of the identical op 2, which finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The cached status reflects the completed staging (Added in the index).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10304
/// Checks pending-op bookkeeping for bulk operations: stage one file, then
/// `stage_all`, then `unstage_all`. Each path should record exactly two
/// finished ops (one `Staged`, one `Unstaged`) — note "a.txt" gets only two
/// even though it was staged individually before `stage_all` — and both files
/// should end up back as `Untracked`.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    // One repository with two untracked files.
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Merge every `PendingOpsChanged` snapshot into one accumulated tree so the
    // full op history per path can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage one file individually, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // "a.txt": the explicit stage finished, then unstage_all finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // "b.txt": staged by stage_all, then unstaged by unstage_all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After the stage/unstage round-trip both files are untracked again.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
            ]
        );
    });
}
10435
/// Opens a worktree rooted at a subfolder of a git repository and verifies
/// that the repository is still resolved to the repo root, that statuses are
/// reported for paths inside the subfolder, and that clearing the repo's
/// statuses is picked up on rescan.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the files under the worktree root.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Only e.txt carries a status; c.txt is clean.
    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Note: the worktree is rooted two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The work directory resolves to the repo root, not the worktree root.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clear all statuses and rescan: no path should report a status anymore.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
10515
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Compiled out via `#[cfg(any())]` (a predicate that is never true) until the
// flakiness is addressed.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    // Uses a real git repository (libgit2 via the git_* helpers) to drive an
    // actual conflicted cherry-pick and verify that `merge_conflicts` tracks
    // the conflict's appearance and resolution.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a divergent commit on another branch, then cherry-pick it onto
    // main on top of a conflicting change to the same file.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is mid-cherry-pick with a conflict.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository should now report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After resolution the conflict list should be empty again.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
10598
/// Verifies that editing `.gitignore` flips which entries are considered
/// ignored, and that a newly non-ignored file can then show up with an index
/// status once it is staged.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Initially *.txt is ignored, so a.xml is tracked and b.txt is ignored.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Ignored-ness is now inverted, and b.txt shows as newly added in the index.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
10666
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// Verifies that renaming a repository's work directory on disk updates
/// `work_directory_abs_path` while preserving the files' git statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Real repository: "a" is committed then modified; "b" stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: work dir is project1, statuses as set up above.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename; statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10748
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// End-to-end file-status lifecycle against a real git repository: initial
/// statuses after scan, worktree modifications, commits, reset/stash, ignored
/// files, deletions, and directory renames are all reflected in the
/// repository's reported statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so they start out untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // Committed files no longer report a status.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // a.txt was stashed away, so it is clean again.
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        // b.txt was removed from the index, so it is untracked once more.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules to also cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A brand-new nested directory with a file shows up as untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming the parent directory keeps the nested file untracked under its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10973
/// Verifies that filesystem churn inside an ignored directory (simulating a
/// build tool writing temp files under `target/`) produces no repository
/// update events and only minimal worktree entry updates.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record every repository update and every worktree entry update so we can
    // assert exactly which events each phase of the test produced.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel path is test-harness noise, not a real entry.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Explicitly load a file inside the ignored directory so its ancestors
    // become materialized worktree entries.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![RepositoryEvent::StatusesChanged,],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Simulate build-tool churn: create a nested ignored dir, write a temp
    // file into it, then delete the whole thing again.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
11132
11133// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
11134// to different timings/ordering of events.
11135#[ignore]
11136#[gpui::test]
11137async fn test_odd_events_for_ignored_dirs(
11138 executor: BackgroundExecutor,
11139 cx: &mut gpui::TestAppContext,
11140) {
11141 init_test(cx);
11142 let fs = FakeFs::new(executor);
11143 fs.insert_tree(
11144 path!("/root"),
11145 json!({
11146 ".git": {},
11147 ".gitignore": "**/target/",
11148 "src": {
11149 "main.rs": "fn main() {}",
11150 },
11151 "target": {
11152 "debug": {
11153 "foo.txt": "foo",
11154 "deps": {}
11155 }
11156 }
11157 }),
11158 )
11159 .await;
11160 fs.set_head_and_index_for_repo(
11161 path!("/root/.git").as_ref(),
11162 &[
11163 (".gitignore", "**/target/".into()),
11164 ("src/main.rs", "fn main() {}".into()),
11165 ],
11166 );
11167
11168 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11169 let repository_updates = Arc::new(Mutex::new(Vec::new()));
11170 let project_events = Arc::new(Mutex::new(Vec::new()));
11171 project.update(cx, |project, cx| {
11172 let repository_updates = repository_updates.clone();
11173 cx.subscribe(project.git_store(), move |_, _, e, _| {
11174 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
11175 repository_updates.lock().push(e.clone());
11176 }
11177 })
11178 .detach();
11179 let project_events = project_events.clone();
11180 cx.subscribe_self(move |_, e, _| {
11181 if let Event::WorktreeUpdatedEntries(_, updates) = e {
11182 project_events.lock().extend(
11183 updates
11184 .iter()
11185 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
11186 .filter(|(path, _)| path != "fs-event-sentinel"),
11187 );
11188 }
11189 })
11190 .detach();
11191 });
11192
11193 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11194 tree.update(cx, |tree, cx| {
11195 tree.load_file(rel_path("target/debug/foo.txt"), cx)
11196 })
11197 .await
11198 .unwrap();
11199 tree.flush_fs_events(cx).await;
11200 project
11201 .update(cx, |project, cx| project.git_scans_complete(cx))
11202 .await;
11203 cx.run_until_parked();
11204 tree.update(cx, |tree, _| {
11205 assert_eq!(
11206 tree.entries(true, 0)
11207 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
11208 .collect::<Vec<_>>(),
11209 vec![
11210 (rel_path(""), false),
11211 (rel_path(".gitignore"), false),
11212 (rel_path("src"), false),
11213 (rel_path("src/main.rs"), false),
11214 (rel_path("target"), true),
11215 (rel_path("target/debug"), true),
11216 (rel_path("target/debug/deps"), true),
11217 (rel_path("target/debug/foo.txt"), true),
11218 ]
11219 );
11220 });
11221
11222 assert_eq!(
11223 repository_updates.lock().drain(..).collect::<Vec<_>>(),
11224 vec![
11225 RepositoryEvent::BranchChanged,
11226 RepositoryEvent::StatusesChanged,
11227 RepositoryEvent::StatusesChanged,
11228 ],
11229 "Initial worktree scan should produce a repo update event"
11230 );
11231 assert_eq!(
11232 project_events.lock().drain(..).collect::<Vec<_>>(),
11233 vec![
11234 ("target".to_string(), PathChange::Loaded),
11235 ("target/debug".to_string(), PathChange::Loaded),
11236 ("target/debug/deps".to_string(), PathChange::Loaded),
11237 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
11238 ],
11239 "All non-ignored entries and all opened firs should be getting a project event",
11240 );
11241
11242 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
11243 // This may happen multiple times during a single flycheck, but once is enough for testing.
11244 fs.emit_fs_event("/root/target/debug/deps", None);
11245 tree.flush_fs_events(cx).await;
11246 project
11247 .update(cx, |project, cx| project.git_scans_complete(cx))
11248 .await;
11249 cx.executor().run_until_parked();
11250
11251 assert_eq!(
11252 repository_updates
11253 .lock()
11254 .iter()
11255 .cloned()
11256 .collect::<Vec<_>>(),
11257 Vec::new(),
11258 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
11259 );
11260 assert_eq!(
11261 project_events.lock().as_slice(),
11262 Vec::new(),
11263 "No further project events should happen, as only ignored dirs received FS events",
11264 );
11265}
11266
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Repositories that are only reachable through an *invisible* worktree
    // must not leak into the project's repository list.
    init_test(cx);
    let fs = FakeFs::new(executor);
    // `dir1` is a repo containing a nested repo at `dir1/dep1`; only the
    // nested repo is opened as a visible worktree below.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only the repository backing the visible worktree is reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add a single-file, invisible worktree that lives inside the outer
    // `dir1` repository.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The outer `dir1` repository must still not appear.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
11328
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear `file_scan_exclusions` so that ignored entries are still scanned
    // and can be asserted on below.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // A `.gitignore` above the worktree root names the `ancestor-ignored-file*`
    // files; the repo-local `.gitignore` ignores `ignored-dir`.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Explicitly load the ignored directory's entries before asserting on
    // them.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initially no entry reports an index status; only `ignored-dir` contents
    // are flagged as ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create one new file of each kind — staged/tracked, named by the
    // ancestor gitignore, and inside the ignored dir — then rescan.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // The newly staged file shows up as Added in the index.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The `.git` directory itself is reported as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
11469
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Linked git worktrees (a `.git` *file* pointing into
    // `<repo>/.git/worktrees/...`) and submodules (a `.git` file pointing into
    // `<repo>/.git/modules/...`) must each be detected as distinct
    // repositories, and git events inside them must refresh their statuses.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories — main, linked worktree, submodule — are found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, not the
    // outer one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // HEAD/index now say "b" while the file on disk says "B": modified.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
11625
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    // Two visible worktrees rooted in sibling subdirectories of the same git
    // repository must resolve to a single, deduplicated repository entry.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repo as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository, rooted at the shared parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
11672
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    // Saving a buffer under a new path must retarget its open diffs: the
    // unstaged diff should compare against the *new* path's staged text, and
    // the uncommitted diff against the new path's committed text.
    init_test(cx);

    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    // Distinct HEAD and index contents per file, so we can tell which base
    // text each diff is using.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11786
11787async fn search(
11788 project: &Entity<Project>,
11789 query: SearchQuery,
11790 cx: &mut gpui::TestAppContext,
11791) -> Result<HashMap<String, Vec<Range<usize>>>> {
11792 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11793 let mut results = HashMap::default();
11794 while let Ok(search_result) = search_rx.rx.recv().await {
11795 match search_result {
11796 SearchResult::Buffer { buffer, ranges } => {
11797 results.entry(buffer).or_insert(ranges);
11798 }
11799 SearchResult::LimitReached => {}
11800 }
11801 }
11802 Ok(results
11803 .into_iter()
11804 .map(|(buffer, ranges)| {
11805 buffer.update(cx, |buffer, cx| {
11806 let path = buffer
11807 .file()
11808 .unwrap()
11809 .full_path(cx)
11810 .to_string_lossy()
11811 .to_string();
11812 let ranges = ranges
11813 .into_iter()
11814 .map(|range| range.to_offset(buffer))
11815 .collect::<Vec<_>>();
11816 (path, ranges)
11817 })
11818 })
11819 .collect())
11820}
11821
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    // A reload-with-encoding must be undoable: undo restores the original
    // encoding and text, redo reapplies the new encoding, and neither
    // operation dirties the buffer (contents always came from disk).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    // 0x48 0x69 read little-endian is U+6948.
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    // Redo restores the UTF-16LE interpretation.
    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11885
/// Shared setup for every test in this module: initializes logging, installs a
/// test `SettingsStore` global, and initializes the release channel (needed by
/// code paths that inspect the app version).
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(semver::Version::new(0, 0, 0), cx);
    });
}
11895
11896fn json_lang() -> Arc<Language> {
11897 Arc::new(Language::new(
11898 LanguageConfig {
11899 name: "JSON".into(),
11900 matcher: LanguageMatcher {
11901 path_suffixes: vec!["json".to_string()],
11902 ..Default::default()
11903 },
11904 ..Default::default()
11905 },
11906 None,
11907 ))
11908}
11909
11910fn js_lang() -> Arc<Language> {
11911 Arc::new(Language::new(
11912 LanguageConfig {
11913 name: "JavaScript".into(),
11914 matcher: LanguageMatcher {
11915 path_suffixes: vec!["js".to_string()],
11916 ..Default::default()
11917 },
11918 ..Default::default()
11919 },
11920 None,
11921 ))
11922}
11923
/// Builds a fake "Python" language whose toolchain lister reports a `.venv`
/// directory found under any ancestor of the queried subroot (relative to the
/// worktree root), backed by the given fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // Report one toolchain per `.venv` directory found in any
            // ancestor of the subroot path.
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is not exercised by these tests.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // The fake environment requires no activation commands.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
11998
11999fn typescript_lang() -> Arc<Language> {
12000 Arc::new(Language::new(
12001 LanguageConfig {
12002 name: "TypeScript".into(),
12003 matcher: LanguageMatcher {
12004 path_suffixes: vec!["ts".to_string()],
12005 ..Default::default()
12006 },
12007 ..Default::default()
12008 },
12009 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
12010 ))
12011}
12012
12013fn tsx_lang() -> Arc<Language> {
12014 Arc::new(Language::new(
12015 LanguageConfig {
12016 name: "tsx".into(),
12017 matcher: LanguageMatcher {
12018 path_suffixes: vec!["tsx".to_string()],
12019 ..Default::default()
12020 },
12021 ..Default::default()
12022 },
12023 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
12024 ))
12025}
12026
12027fn get_all_tasks(
12028 project: &Entity<Project>,
12029 task_contexts: Arc<TaskContexts>,
12030 cx: &mut App,
12031) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
12032 let new_tasks = project.update(cx, |project, cx| {
12033 project.task_store().update(cx, |task_store, cx| {
12034 task_store.task_inventory().unwrap().update(cx, |this, cx| {
12035 this.used_and_current_resolved_tasks(task_contexts, cx)
12036 })
12037 })
12038 });
12039
12040 cx.background_spawn(async move {
12041 let (mut old, new) = new_tasks.await;
12042 old.extend(new);
12043 old
12044 })
12045}
12046
12047#[track_caller]
12048fn assert_entry_git_state(
12049 tree: &Worktree,
12050 repository: &Repository,
12051 path: &str,
12052 index_status: Option<StatusCode>,
12053 is_ignored: bool,
12054) {
12055 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
12056 let entry = tree
12057 .entry_for_path(&rel_path(path))
12058 .unwrap_or_else(|| panic!("entry {path} not found"));
12059 let status = repository
12060 .status_for_path(&repo_path(path))
12061 .map(|entry| entry.status);
12062 let expected = index_status.map(|index_status| {
12063 TrackedStatus {
12064 index_status,
12065 worktree_status: StatusCode::Unmodified,
12066 }
12067 .into()
12068 });
12069 assert_eq!(
12070 status, expected,
12071 "expected {path} to have git status: {expected:?}"
12072 );
12073 assert_eq!(
12074 entry.is_ignored, is_ignored,
12075 "expected {path} to have is_ignored: {is_ignored}"
12076 );
12077}
12078
12079#[track_caller]
12080fn git_init(path: &Path) -> git2::Repository {
12081 let mut init_opts = RepositoryInitOptions::new();
12082 init_opts.initial_head("main");
12083 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
12084}
12085
12086#[track_caller]
12087fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
12088 let path = path.as_ref();
12089 let mut index = repo.index().expect("Failed to get index");
12090 index.add_path(path).expect("Failed to add file");
12091 index.write().expect("Failed to write index");
12092}
12093
12094#[track_caller]
12095fn git_remove_index(path: &Path, repo: &git2::Repository) {
12096 let mut index = repo.index().expect("Failed to get index");
12097 index.remove_path(path).expect("Failed to add file");
12098 index.write().expect("Failed to write index");
12099}
12100
12101#[track_caller]
12102fn git_commit(msg: &'static str, repo: &git2::Repository) {
12103 use git2::Signature;
12104
12105 let signature = Signature::now("test", "test@zed.dev").unwrap();
12106 let oid = repo.index().unwrap().write_tree().unwrap();
12107 let tree = repo.find_tree(oid).unwrap();
12108 if let Ok(head) = repo.head() {
12109 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
12110
12111 let parent_commit = parent_obj.as_commit().unwrap();
12112
12113 repo.commit(
12114 Some("HEAD"),
12115 &signature,
12116 &signature,
12117 msg,
12118 &tree,
12119 &[parent_commit],
12120 )
12121 .expect("Failed to commit with parent");
12122 } else {
12123 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
12124 .expect("Failed to commit");
12125 }
12126}
12127
/// Cherry-picks `commit` onto the current HEAD. (Currently compiled out via
/// `#[cfg(any())]`; kept for future tests.)
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
12133
12134#[track_caller]
12135fn git_stash(repo: &mut git2::Repository) {
12136 use git2::Signature;
12137
12138 let signature = Signature::now("test", "test@zed.dev").unwrap();
12139 repo.stash_save(&signature, "N/A", None)
12140 .expect("Failed to stash");
12141}
12142
12143#[track_caller]
12144fn git_reset(offset: usize, repo: &git2::Repository) {
12145 let head = repo.head().expect("Couldn't get repo head");
12146 let object = head.peel(git2::ObjectType::Commit).unwrap();
12147 let commit = object.as_commit().unwrap();
12148 let new_head = commit
12149 .parents()
12150 .inspect(|parnet| {
12151 parnet.message();
12152 })
12153 .nth(offset)
12154 .expect("Not enough history");
12155 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
12156 .expect("Could not reset");
12157}
12158
/// Creates branch `name` pointing at the current HEAD commit. (Currently
/// compiled out via `#[cfg(any())]`; kept for future tests.)
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // The expect message previously read "Failed to commit", a copy-paste
    // error: this call creates a branch.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
12169
/// Points HEAD at the ref `name` and checks out that revision. (Currently
/// compiled out via `#[cfg(any())]`; kept for future tests.)
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
12176
/// Collects the repository's status entries into a path → status map.
/// (Currently compiled out via `#[cfg(any())]`; kept for future tests.)
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}
12186
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two separate worktrees so cross-worktree resolution can be checked.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Absolute paths inside each worktree resolve to that worktree plus a
        // worktree-relative path.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // Resolution is path-based: a file need not exist on disk as long as
        // it falls under a worktree root.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
12270
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    // Removing a worktree should drop its repositories only when no other
    // worktree still needs them, and the active repository should fall back
    // to one of the remaining repositories.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Three worktrees: `a`, `b`, and `b/script` (the latter lies *inside*
    // repository `b`).
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Two repositories total: `a` and `b` (shared by `b` and `b/script`).
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing `b/script` must not drop repository `b`, which worktree `b`
    // still uses.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing worktree `a` drops its repository; the active repository
    // falls back to `b`.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the last worktree leaves no active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
12383
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD contains "two" while the working copy has "TWO", so the file
    // carries exactly one modified hunk, which starts out unstaged.
    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // Put the committed contents in both HEAD and the index, so the
    // worktree change is present but not yet staged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            // The staging job hasn't been observed yet; keep ticking.
            HasSecondaryHunk => {}
            // The optimistic "staging in flight" state was reached; stop here
            // so the assertion below can observe it.
            SecondaryHunkRemovalPending => break,
            // Reaching the fully-staged state without ever passing through
            // the pending state means the optimistic update was skipped.
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // Mid-operation, the hunk must report the optimistic pending state.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
12528
12529#[gpui::test]
12530async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
12531 init_test(cx);
12532
12533 // Configure read_only_files setting
12534 cx.update(|cx| {
12535 cx.update_global::<SettingsStore, _>(|store, cx| {
12536 store.update_user_settings(cx, |settings| {
12537 settings.project.worktree.read_only_files = Some(vec![
12538 "**/generated/**".to_string(),
12539 "**/*.gen.rs".to_string(),
12540 ]);
12541 });
12542 });
12543 });
12544
12545 let fs = FakeFs::new(cx.background_executor.clone());
12546 fs.insert_tree(
12547 path!("/root"),
12548 json!({
12549 "src": {
12550 "main.rs": "fn main() {}",
12551 "types.gen.rs": "// Generated file",
12552 },
12553 "generated": {
12554 "schema.rs": "// Auto-generated schema",
12555 }
12556 }),
12557 )
12558 .await;
12559
12560 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12561
12562 // Open a regular file - should be read-write
12563 let regular_buffer = project
12564 .update(cx, |project, cx| {
12565 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12566 })
12567 .await
12568 .unwrap();
12569
12570 regular_buffer.read_with(cx, |buffer, _| {
12571 assert!(!buffer.read_only(), "Regular file should not be read-only");
12572 });
12573
12574 // Open a file matching *.gen.rs pattern - should be read-only
12575 let gen_buffer = project
12576 .update(cx, |project, cx| {
12577 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
12578 })
12579 .await
12580 .unwrap();
12581
12582 gen_buffer.read_with(cx, |buffer, _| {
12583 assert!(
12584 buffer.read_only(),
12585 "File matching *.gen.rs pattern should be read-only"
12586 );
12587 });
12588
12589 // Open a file in generated directory - should be read-only
12590 let generated_buffer = project
12591 .update(cx, |project, cx| {
12592 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12593 })
12594 .await
12595 .unwrap();
12596
12597 generated_buffer.read_with(cx, |buffer, _| {
12598 assert!(
12599 buffer.read_only(),
12600 "File in generated directory should be read-only"
12601 );
12602 });
12603}
12604
12605#[gpui::test]
12606async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
12607 init_test(cx);
12608
12609 // Explicitly set read_only_files to empty (default behavior)
12610 cx.update(|cx| {
12611 cx.update_global::<SettingsStore, _>(|store, cx| {
12612 store.update_user_settings(cx, |settings| {
12613 settings.project.worktree.read_only_files = Some(vec![]);
12614 });
12615 });
12616 });
12617
12618 let fs = FakeFs::new(cx.background_executor.clone());
12619 fs.insert_tree(
12620 path!("/root"),
12621 json!({
12622 "src": {
12623 "main.rs": "fn main() {}",
12624 },
12625 "generated": {
12626 "schema.rs": "// Auto-generated schema",
12627 }
12628 }),
12629 )
12630 .await;
12631
12632 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12633
12634 // All files should be read-write when read_only_files is empty
12635 let main_buffer = project
12636 .update(cx, |project, cx| {
12637 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12638 })
12639 .await
12640 .unwrap();
12641
12642 main_buffer.read_with(cx, |buffer, _| {
12643 assert!(
12644 !buffer.read_only(),
12645 "Files should not be read-only when read_only_files is empty"
12646 );
12647 });
12648
12649 let generated_buffer = project
12650 .update(cx, |project, cx| {
12651 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12652 })
12653 .await
12654 .unwrap();
12655
12656 generated_buffer.read_with(cx, |buffer, _| {
12657 assert!(
12658 !buffer.read_only(),
12659 "Generated files should not be read-only when read_only_files is empty"
12660 );
12661 });
12662}
12663
12664#[gpui::test]
12665async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
12666 init_test(cx);
12667
12668 // Configure to make lock files read-only
12669 cx.update(|cx| {
12670 cx.update_global::<SettingsStore, _>(|store, cx| {
12671 store.update_user_settings(cx, |settings| {
12672 settings.project.worktree.read_only_files = Some(vec![
12673 "**/*.lock".to_string(),
12674 "**/package-lock.json".to_string(),
12675 ]);
12676 });
12677 });
12678 });
12679
12680 let fs = FakeFs::new(cx.background_executor.clone());
12681 fs.insert_tree(
12682 path!("/root"),
12683 json!({
12684 "Cargo.lock": "# Lock file",
12685 "Cargo.toml": "[package]",
12686 "package-lock.json": "{}",
12687 "package.json": "{}",
12688 }),
12689 )
12690 .await;
12691
12692 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12693
12694 // Cargo.lock should be read-only
12695 let cargo_lock = project
12696 .update(cx, |project, cx| {
12697 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
12698 })
12699 .await
12700 .unwrap();
12701
12702 cargo_lock.read_with(cx, |buffer, _| {
12703 assert!(buffer.read_only(), "Cargo.lock should be read-only");
12704 });
12705
12706 // Cargo.toml should be read-write
12707 let cargo_toml = project
12708 .update(cx, |project, cx| {
12709 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
12710 })
12711 .await
12712 .unwrap();
12713
12714 cargo_toml.read_with(cx, |buffer, _| {
12715 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
12716 });
12717
12718 // package-lock.json should be read-only
12719 let package_lock = project
12720 .update(cx, |project, cx| {
12721 project.open_local_buffer(path!("/root/package-lock.json"), cx)
12722 })
12723 .await
12724 .unwrap();
12725
12726 package_lock.read_with(cx, |buffer, _| {
12727 assert!(buffer.read_only(), "package-lock.json should be read-only");
12728 });
12729
12730 // package.json should be read-write
12731 let package_json = project
12732 .update(cx, |project, cx| {
12733 project.open_local_buffer(path!("/root/package.json"), cx)
12734 })
12735 .await
12736 .unwrap();
12737
12738 package_json.read_with(cx, |buffer, _| {
12739 assert!(!buffer.read_only(), "package.json should not be read-only");
12740 });
12741}
12742
12743mod disable_ai_settings_tests {
12744 use gpui::TestAppContext;
12745 use project::*;
12746 use settings::{Settings, SettingsStore};
12747
12748 #[gpui::test]
12749 async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
12750 cx.update(|cx| {
12751 settings::init(cx);
12752
12753 // Test 1: Default is false (AI enabled)
12754 assert!(
12755 !DisableAiSettings::get_global(cx).disable_ai,
12756 "Default should allow AI"
12757 );
12758 });
12759
12760 let disable_true = serde_json::json!({
12761 "disable_ai": true
12762 })
12763 .to_string();
12764 let disable_false = serde_json::json!({
12765 "disable_ai": false
12766 })
12767 .to_string();
12768
12769 cx.update_global::<SettingsStore, _>(|store, cx| {
12770 store.set_user_settings(&disable_false, cx).unwrap();
12771 store.set_global_settings(&disable_true, cx).unwrap();
12772 });
12773 cx.update(|cx| {
12774 assert!(
12775 DisableAiSettings::get_global(cx).disable_ai,
12776 "Local false cannot override global true"
12777 );
12778 });
12779
12780 cx.update_global::<SettingsStore, _>(|store, cx| {
12781 store.set_global_settings(&disable_false, cx).unwrap();
12782 store.set_user_settings(&disable_true, cx).unwrap();
12783 });
12784
12785 cx.update(|cx| {
12786 assert!(
12787 DisableAiSettings::get_global(cx).disable_ai,
12788 "Local false cannot override global true"
12789 );
12790 });
12791 }
12792
12793 #[gpui::test]
12794 async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
12795 use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
12796 use worktree::WorktreeId;
12797
12798 cx.update(|cx| {
12799 settings::init(cx);
12800
12801 // Default should allow AI
12802 assert!(
12803 !DisableAiSettings::get_global(cx).disable_ai,
12804 "Default should allow AI"
12805 );
12806 });
12807
12808 let worktree_id = WorktreeId::from_usize(1);
12809 let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
12810 std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
12811 };
12812 let project_path = rel_path("project");
12813 let settings_location = SettingsLocation {
12814 worktree_id,
12815 path: project_path.as_ref(),
12816 };
12817
12818 // Test: Project-level disable_ai=true should disable AI for files in that project
12819 cx.update_global::<SettingsStore, _>(|store, cx| {
12820 store
12821 .set_local_settings(
12822 worktree_id,
12823 LocalSettingsPath::InWorktree(project_path.clone()),
12824 LocalSettingsKind::Settings,
12825 Some(r#"{ "disable_ai": true }"#),
12826 cx,
12827 )
12828 .unwrap();
12829 });
12830
12831 cx.update(|cx| {
12832 let settings = DisableAiSettings::get(Some(settings_location), cx);
12833 assert!(
12834 settings.disable_ai,
12835 "Project-level disable_ai=true should disable AI for files in that project"
12836 );
12837 // Global should now also be true since project-level disable_ai is merged into global
12838 assert!(
12839 DisableAiSettings::get_global(cx).disable_ai,
12840 "Global setting should be affected by project-level disable_ai=true"
12841 );
12842 });
12843
12844 // Test: Setting project-level to false should allow AI for that project
12845 cx.update_global::<SettingsStore, _>(|store, cx| {
12846 store
12847 .set_local_settings(
12848 worktree_id,
12849 LocalSettingsPath::InWorktree(project_path.clone()),
12850 LocalSettingsKind::Settings,
12851 Some(r#"{ "disable_ai": false }"#),
12852 cx,
12853 )
12854 .unwrap();
12855 });
12856
12857 cx.update(|cx| {
12858 let settings = DisableAiSettings::get(Some(settings_location), cx);
12859 assert!(
12860 !settings.disable_ai,
12861 "Project-level disable_ai=false should allow AI"
12862 );
12863 // Global should also be false now
12864 assert!(
12865 !DisableAiSettings::get_global(cx).disable_ai,
12866 "Global setting should be false when project-level is false"
12867 );
12868 });
12869
12870 // Test: User-level true + project-level false = AI disabled (saturation)
12871 let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
12872 cx.update_global::<SettingsStore, _>(|store, cx| {
12873 store.set_user_settings(&disable_true, cx).unwrap();
12874 store
12875 .set_local_settings(
12876 worktree_id,
12877 LocalSettingsPath::InWorktree(project_path.clone()),
12878 LocalSettingsKind::Settings,
12879 Some(r#"{ "disable_ai": false }"#),
12880 cx,
12881 )
12882 .unwrap();
12883 });
12884
12885 cx.update(|cx| {
12886 let settings = DisableAiSettings::get(Some(settings_location), cx);
12887 assert!(
12888 settings.disable_ai,
12889 "Project-level false cannot override user-level true (SaturatingBool)"
12890 );
12891 });
12892 }
12893}