1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use fs::FakeFs;
29use futures::{StreamExt, future};
30use git::{
31 GitHostingProviderRegistry,
32 repository::{RepoPath, repo_path},
33 status::{FileStatus, StatusCode, TrackedStatus},
34};
35use git2::RepositoryInitOptions;
36use gpui::{
37 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
38 UpdateGlobal,
39};
40use itertools::Itertools;
41use language::{
42 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
43 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
44 LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
45 ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
46 language_settings::{LanguageSettingsContent, language_settings},
47 markdown_lang, rust_lang, tree_sitter_typescript,
48};
49use lsp::{
50 CodeActionKind, DiagnosticSeverity, DocumentChanges, FileOperationFilter, LanguageServerId,
51 LanguageServerName, NumberOrString, TextDocumentEdit, Uri, WillRenameFiles,
52 notification::DidRenameFiles,
53};
54use parking_lot::Mutex;
55use paths::{config_dir, global_gitignore_path, tasks_file};
56use postage::stream::Stream as _;
57use pretty_assertions::{assert_eq, assert_matches};
58use project::{
59 Event, TaskContexts,
60 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
61 search::{SearchQuery, SearchResult},
62 task_store::{TaskSettingsLocation, TaskStore},
63 *,
64};
65use rand::{Rng as _, rngs::StdRng};
66use serde_json::json;
67use settings::SettingsStore;
68#[cfg(not(windows))]
69use std::os;
70use std::{
71 cell::RefCell,
72 env, mem,
73 num::NonZeroU32,
74 ops::Range,
75 path::{Path, PathBuf},
76 rc::Rc,
77 str::FromStr,
78 sync::{Arc, OnceLock},
79 task::Poll,
80 time::Duration,
81};
82use sum_tree::SumTree;
83use task::{ResolvedTask, ShellKind, TaskContext};
84use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
85use unindent::Unindent as _;
86use util::{
87 TryFutureExt as _, assert_set_eq, maybe, path,
88 paths::{PathMatcher, PathStyle},
89 rel_path::{RelPath, rel_path},
90 test::{TempTree, marked_text_offsets},
91 uri,
92};
93use worktree::WorktreeModelHandle as _;
94
95#[gpui::test]
96async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
97 cx.executor().allow_parking();
98
99 let (tx, mut rx) = futures::channel::mpsc::unbounded();
100 let _thread = std::thread::spawn(move || {
101 #[cfg(not(target_os = "windows"))]
102 std::fs::metadata("/tmp").unwrap();
103 #[cfg(target_os = "windows")]
104 std::fs::metadata("C:/Windows").unwrap();
105 std::thread::sleep(Duration::from_millis(1000));
106 tx.unbounded_send(1).unwrap();
107 });
108 rx.next().await.unwrap();
109}
110
111#[gpui::test]
112async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
113 cx.executor().allow_parking();
114
115 let io_task = smol::unblock(move || {
116 println!("sleeping on thread {:?}", std::thread::current().id());
117 std::thread::sleep(Duration::from_millis(10));
118 1
119 });
120
121 let task = cx.foreground_executor().spawn(async move {
122 io_task.await;
123 });
124
125 task.await;
126}
127
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
// we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    // Verifies that worktree scanning follows symlinks: a worktree rooted at a
    // symlinked directory scans the link target, and a symlinked subdirectory
    // inside the tree resolves to the same underlying files as its target.
    init_test(cx);
    // Uses the real filesystem (RealFs + TempTree), so parking is allowed.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // One symlink to the root directory itself, and one symlinked
    // subdirectory ("finnochio" -> "fennel") inside the tree.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // apple, banana/carrot/{date,endive}, fennel/grape, plus the file
        // visible through the finnochio symlink.
        assert_eq!(tree.file_count(), 5);
        // Both paths must resolve to the same underlying file (same inode).
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
178
179#[gpui::test]
180async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
181 init_test(cx);
182
183 let dir = TempTree::new(json!({
184 ".editorconfig": r#"
185 root = true
186 [*.rs]
187 indent_style = tab
188 indent_size = 3
189 end_of_line = lf
190 insert_final_newline = true
191 trim_trailing_whitespace = true
192 max_line_length = 120
193 [*.js]
194 tab_width = 10
195 max_line_length = off
196 "#,
197 ".zed": {
198 "settings.json": r#"{
199 "tab_size": 8,
200 "hard_tabs": false,
201 "ensure_final_newline_on_save": false,
202 "remove_trailing_whitespace_on_save": false,
203 "preferred_line_length": 64,
204 "soft_wrap": "editor_width",
205 }"#,
206 },
207 "a.rs": "fn a() {\n A\n}",
208 "b": {
209 ".editorconfig": r#"
210 [*.rs]
211 indent_size = 2
212 max_line_length = off,
213 "#,
214 "b.rs": "fn b() {\n B\n}",
215 },
216 "c.js": "def c\n C\nend",
217 "README.json": "tabs are better\n",
218 }));
219
220 let path = dir.path();
221 let fs = FakeFs::new(cx.executor());
222 fs.insert_tree_from_real_fs(path, path).await;
223 let project = Project::test(fs, [path], cx).await;
224
225 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
226 language_registry.add(js_lang());
227 language_registry.add(json_lang());
228 language_registry.add(rust_lang());
229
230 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
231
232 cx.executor().run_until_parked();
233
234 cx.update(|cx| {
235 let tree = worktree.read(cx);
236 let settings_for = |path: &str| {
237 let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
238 let file = File::for_entry(file_entry, worktree.clone());
239 let file_language = project
240 .read(cx)
241 .languages()
242 .load_language_for_file_path(file.path.as_std_path());
243 let file_language = cx
244 .foreground_executor()
245 .block_on(file_language)
246 .expect("Failed to get file language");
247 let file = file as _;
248 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
249 };
250
251 let settings_a = settings_for("a.rs");
252 let settings_b = settings_for("b/b.rs");
253 let settings_c = settings_for("c.js");
254 let settings_readme = settings_for("README.json");
255
256 // .editorconfig overrides .zed/settings
257 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
258 assert_eq!(settings_a.hard_tabs, true);
259 assert_eq!(settings_a.ensure_final_newline_on_save, true);
260 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
261 assert_eq!(settings_a.preferred_line_length, 120);
262
263 // .editorconfig in b/ overrides .editorconfig in root
264 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
265
266 // "indent_size" is not set, so "tab_width" is used
267 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
268
269 // When max_line_length is "off", default to .zed/settings.json
270 assert_eq!(settings_b.preferred_line_length, 64);
271 assert_eq!(settings_c.preferred_line_length, 64);
272
273 // README.md should not be affected by .editorconfig's globe "*.rs"
274 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
275 });
276}
277
#[gpui::test]
async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies that .editorconfig files located *above* the worktree root
    // ("external" configs) are discovered and applied, with the nearest
    // matching config winning per glob.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "parent": {
                ".editorconfig": "[*.rs]\nindent_size = 2\n",
                "worktree": {
                    ".editorconfig": "[*.md]\nindent_size = 3\n",
                    "main.rs": "fn main() {}",
                    "README.md": "# README",
                    "other.txt": "other content",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    language_registry.add(markdown_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for one file in the worktree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_rs = settings_for("main.rs");
        let settings_md = settings_for("README.md");
        let settings_txt = settings_for("other.txt");

        // main.rs gets indent_size = 2 from parent's external .editorconfig
        assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));

        // README.md gets indent_size = 3 from internal worktree .editorconfig
        assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));

        // other.txt gets indent_size = 4 from grandparent's external .editorconfig
        assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
    });
}
341
342#[gpui::test]
343async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
344 init_test(cx);
345
346 let fs = FakeFs::new(cx.executor());
347 fs.insert_tree(
348 path!("/parent"),
349 json!({
350 ".editorconfig": "[*]\nindent_size = 99\n",
351 "worktree": {
352 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
353 "file.rs": "fn main() {}",
354 }
355 }),
356 )
357 .await;
358
359 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
360
361 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
362 language_registry.add(rust_lang());
363
364 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
365
366 cx.executor().run_until_parked();
367
368 cx.update(|cx| {
369 let tree = worktree.read(cx);
370 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
371 let file = File::for_entry(file_entry, worktree.clone());
372 let file_language = project
373 .read(cx)
374 .languages()
375 .load_language_for_file_path(file.path.as_std_path());
376 let file_language = cx
377 .foreground_executor()
378 .block_on(file_language)
379 .expect("Failed to get file language");
380 let file = file as _;
381 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
382
383 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
384 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
385 });
386}
387
#[gpui::test]
async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
    // Verifies that a `root = true` in an ancestor's .editorconfig stops the
    // upward search at that ancestor, so configs further up are never applied.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "parent": {
                ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
                "worktree": {
                    "file.rs": "fn main() {}",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
435
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    // Verifies that two sibling worktrees both resolve settings from the same
    // external .editorconfig in their shared parent directory.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    // Open both sibling directories as separate worktrees of one project.
    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        for worktree in worktrees {
            let tree = worktree.read(cx);
            let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            let settings =
                language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        }
    });
}
497
498#[gpui::test]
499async fn test_external_editorconfig_not_loaded_without_internal_config(
500 cx: &mut gpui::TestAppContext,
501) {
502 init_test(cx);
503
504 let fs = FakeFs::new(cx.executor());
505 fs.insert_tree(
506 path!("/parent"),
507 json!({
508 ".editorconfig": "[*]\nindent_size = 99\n",
509 "worktree": {
510 "file.rs": "fn main() {}",
511 }
512 }),
513 )
514 .await;
515
516 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
517
518 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
519 language_registry.add(rust_lang());
520
521 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
522
523 cx.executor().run_until_parked();
524
525 cx.update(|cx| {
526 let tree = worktree.read(cx);
527 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
528 let file = File::for_entry(file_entry, worktree.clone());
529 let file_language = project
530 .read(cx)
531 .languages()
532 .load_language_for_file_path(file.path.as_std_path());
533 let file_language = cx
534 .foreground_executor()
535 .block_on(file_language)
536 .expect("Failed to get file language");
537 let file = file as _;
538 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
539
540 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
541 // because without an internal .editorconfig, external configs are not loaded
542 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
543 });
544}
545
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    // Verifies that modifying an external (outside-the-worktree) .editorconfig
    // on disk is observed and refreshes the resolved settings.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the external config on disk; the change should be picked up
    // after the executor settles.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
619
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    // Verifies that a worktree added *after* project creation also discovers
    // external .editorconfig files above its root.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    // Start with only one of the two sibling directories as a worktree.
    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktree = project.read(cx).worktrees(cx).next().unwrap();
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Add the second worktree after the project already exists.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = new_worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, new_worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
696
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    // Verifies that removing a worktree drops all of its editorconfig
    // bookkeeping in the SettingsStore: the worktree entry, the loaded
    // external configs, and their file watchers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        // test_state() exposes the store's internal bookkeeping for assertions.
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
752
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    // Verifies refcount-style cleanup of a shared external .editorconfig:
    // removing one of two worktrees that share it must keep the config (and
    // its watcher) alive for the remaining worktree.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        // NOTE(review): assumes worktrees() yields worktrees in the order they
        // were passed to Project::test (worktree_a first) — confirm.
        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    cx.update(|cx| {
        let tree = worktree_b.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree_b.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
850
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    // Verifies that custom git hosting providers declared in a project's
    // .zed/settings.json are registered in the global registry, and are
    // unregistered again when the setting is removed.
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // The "foo" provider from the project settings should now be registered.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the project settings; the provider should be unregistered.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
915
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies per-directory .zed settings and tasks:
    // - a nested .zed/settings.json overrides the worktree-root one,
    // - tasks from both nested and root .zed/tasks.json are surfaced,
    // - scheduling a task moves it to the front of subsequent listings,
    // - globally-registered file-based tasks are listed after worktree tasks.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task resolution requires an active worktree context to pick up the
    // worktree-local tasks.json files.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for the task defined in the worktree-root .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // First confirm the nested settings override before listing tasks.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        // Flatten each resolved task into a comparable tuple.
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root-level task as scheduled and register an additional global
    // (file-based) task, then re-list.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store()
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The just-scheduled root task now sorts first; the global file-based
    // task is appended after the worktree tasks.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
1116
// An invalid `.zed/tasks.json` — here, one referencing the unknown task
// variable `$ZED_FOO` — should surface a `Event::Toast` whose link points at
// the tasks documentation.
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we had a chance to setup the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        // Flip the flag once a toast with the expected id prefix, message, and
        // docs link arrives; any other event is ignored.
        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1174
// A worktree task that expands `$ZED_WORKTREE_ROOT` can only resolve when some
// context supplies that variable: with no active-worktree context the task list
// is empty, and with one it resolves against that worktree's root.
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Only an active-item context (no worktree context), so `$ZED_WORKTREE_ROOT`
    // has no value and the task cannot be resolved at all.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Same query, but now the worktree context provides `WorktreeRoot = /dir`,
    // so the task resolves and the variable is substituted into the command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            // `$ZED_WORKTREE_ROOT` expanded to the worktree root.
            "echo /dir".to_string(),
        )]
    );
}
1266
// Two subprojects (each rooted by a `pyproject.toml`) in one worktree share a
// single language-server instance until their configurations diverge; once a
// distinct toolchain is activated for one of them, a second server instance is
// started for that subproject.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walk up at most `depth` ancestors of `path` and return the first one
        // containing a `pyproject.toml`, i.e. the subproject root.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" server.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery is rooted at the subproject, not the worktree root.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    // Nothing has been activated yet.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1468
// End-to-end coverage of the language-server lifecycle for local buffers:
// server startup on first open, capability-based buffer configuration,
// routing of change/save notifications to the matching server, close/open on
// rename (including cross-language renames, which clear diagnostics and reset
// document versions), server restarts, and close notifications on buffer drop.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // The Rust server's next change notification is for test2.rs — the TOML
    // edit was never sent to it.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-language rename is modeled as close(old) + open(new) on the
    // Rust server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify below that it gets cleared when the
    // buffer's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before the restart proceeds.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1871
// A language-server binary path from settings that exists relative to the
// worktree is resolved against the worktree root; a path that doesn't exist
// there is passed through unchanged (to be looked up on PATH).
#[gpui::test]
async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let settings_json_contents = json!({
        "languages": {
            "Rust": {
                "language_servers": ["my_fake_lsp", "lsp_on_path"]
            }
        },
        "lsp": {
            "my_fake_lsp": {
                "binary": {
                    // file exists, so this is treated as a relative path
                    "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
                }
            },
            "lsp_on_path": {
                "binary": {
                    // file doesn't exist, so it will fall back on PATH env var
                    "path": path!("lsp_on_path.exe").to_string(),
                }
            }
        },
    });

    // NOTE(review): the tree below creates `my_fake_lsp.exe`, while the
    // settings and the first assertion reference `my_fake_lsp_binary.exe`.
    // Presumably resolution keys off something other than the exact file name
    // (or the "file exists" comment above is stale) — confirm which.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": settings_json_contents.to_string(),
            },
            ".relative_path": {
                "to": {
                    "my_fake_lsp.exe": "",
                },
            },
            "src": {
                "main.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let mut my_fake_lsp = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "my_fake_lsp",
            ..Default::default()
        },
    );
    let mut lsp_on_path = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "lsp_on_path",
            ..Default::default()
        },
    );

    cx.run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    // The relative settings path was resolved against the worktree root.
    let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
    assert_eq!(
        lsp_path.to_string_lossy(),
        path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
    );

    // The non-existent path is left as-is for PATH lookup.
    let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
    assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
}
1955
1956#[gpui::test]
1957async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
1958 init_test(cx);
1959
1960 let settings_json_contents = json!({
1961 "languages": {
1962 "Rust": {
1963 "language_servers": ["tilde_lsp"]
1964 }
1965 },
1966 "lsp": {
1967 "tilde_lsp": {
1968 "binary": {
1969 "path": "~/.local/bin/rust-analyzer",
1970 }
1971 }
1972 },
1973 });
1974
1975 let fs = FakeFs::new(cx.executor());
1976 fs.insert_tree(
1977 path!("/root"),
1978 json!({
1979 ".zed": {
1980 "settings.json": settings_json_contents.to_string(),
1981 },
1982 "src": {
1983 "main.rs": "fn main() {}",
1984 }
1985 }),
1986 )
1987 .await;
1988
1989 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
1990 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1991 language_registry.add(rust_lang());
1992
1993 let mut tilde_lsp = language_registry.register_fake_lsp(
1994 "Rust",
1995 FakeLspAdapter {
1996 name: "tilde_lsp",
1997 ..Default::default()
1998 },
1999 );
2000 cx.run_until_parked();
2001
2002 project
2003 .update(cx, |project, cx| {
2004 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2005 })
2006 .await
2007 .unwrap();
2008
2009 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2010 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2011 assert_eq!(
2012 lsp_path, expected_path,
2013 "Tilde path should expand to home directory"
2014 );
2015}
2016
2017#[gpui::test]
2018async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2019 init_test(cx);
2020
2021 let fs = FakeFs::new(cx.executor());
2022 fs.insert_tree(
2023 path!("/the-root"),
2024 json!({
2025 ".gitignore": "target\n",
2026 "Cargo.lock": "",
2027 "src": {
2028 "a.rs": "",
2029 "b.rs": "",
2030 },
2031 "target": {
2032 "x": {
2033 "out": {
2034 "x.rs": ""
2035 }
2036 },
2037 "y": {
2038 "out": {
2039 "y.rs": "",
2040 }
2041 },
2042 "z": {
2043 "out": {
2044 "z.rs": ""
2045 }
2046 }
2047 }
2048 }),
2049 )
2050 .await;
2051 fs.insert_tree(
2052 path!("/the-registry"),
2053 json!({
2054 "dep1": {
2055 "src": {
2056 "dep1.rs": "",
2057 }
2058 },
2059 "dep2": {
2060 "src": {
2061 "dep2.rs": "",
2062 }
2063 },
2064 }),
2065 )
2066 .await;
2067 fs.insert_tree(
2068 path!("/the/stdlib"),
2069 json!({
2070 "LICENSE": "",
2071 "src": {
2072 "string.rs": "",
2073 }
2074 }),
2075 )
2076 .await;
2077
2078 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2079 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2080 (project.languages().clone(), project.lsp_store())
2081 });
2082 language_registry.add(rust_lang());
2083 let mut fake_servers = language_registry.register_fake_lsp(
2084 "Rust",
2085 FakeLspAdapter {
2086 name: "the-language-server",
2087 ..Default::default()
2088 },
2089 );
2090
2091 cx.executor().run_until_parked();
2092
2093 // Start the language server by opening a buffer with a compatible file extension.
2094 project
2095 .update(cx, |project, cx| {
2096 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2097 })
2098 .await
2099 .unwrap();
2100
2101 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2102 project.update(cx, |project, cx| {
2103 let worktree = project.worktrees(cx).next().unwrap();
2104 assert_eq!(
2105 worktree
2106 .read(cx)
2107 .snapshot()
2108 .entries(true, 0)
2109 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2110 .collect::<Vec<_>>(),
2111 &[
2112 ("", false),
2113 (".gitignore", false),
2114 ("Cargo.lock", false),
2115 ("src", false),
2116 ("src/a.rs", false),
2117 ("src/b.rs", false),
2118 ("target", true),
2119 ]
2120 );
2121 });
2122
2123 let prev_read_dir_count = fs.read_dir_call_count();
2124
2125 let fake_server = fake_servers.next().await.unwrap();
2126 cx.executor().run_until_parked();
2127 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2128 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2129 id
2130 });
2131
2132 // Simulate jumping to a definition in a dependency outside of the worktree.
2133 let _out_of_worktree_buffer = project
2134 .update(cx, |project, cx| {
2135 project.open_local_buffer_via_lsp(
2136 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2137 server_id,
2138 cx,
2139 )
2140 })
2141 .await
2142 .unwrap();
2143
2144 // Keep track of the FS events reported to the language server.
2145 let file_changes = Arc::new(Mutex::new(Vec::new()));
2146 fake_server
2147 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
2148 registrations: vec![lsp::Registration {
2149 id: Default::default(),
2150 method: "workspace/didChangeWatchedFiles".to_string(),
2151 register_options: serde_json::to_value(
2152 lsp::DidChangeWatchedFilesRegistrationOptions {
2153 watchers: vec![
2154 lsp::FileSystemWatcher {
2155 glob_pattern: lsp::GlobPattern::String(
2156 path!("/the-root/Cargo.toml").to_string(),
2157 ),
2158 kind: None,
2159 },
2160 lsp::FileSystemWatcher {
2161 glob_pattern: lsp::GlobPattern::String(
2162 path!("/the-root/src/*.{rs,c}").to_string(),
2163 ),
2164 kind: None,
2165 },
2166 lsp::FileSystemWatcher {
2167 glob_pattern: lsp::GlobPattern::String(
2168 path!("/the-root/target/y/**/*.rs").to_string(),
2169 ),
2170 kind: None,
2171 },
2172 lsp::FileSystemWatcher {
2173 glob_pattern: lsp::GlobPattern::String(
2174 path!("/the/stdlib/src/**/*.rs").to_string(),
2175 ),
2176 kind: None,
2177 },
2178 lsp::FileSystemWatcher {
2179 glob_pattern: lsp::GlobPattern::String(
2180 path!("**/Cargo.lock").to_string(),
2181 ),
2182 kind: None,
2183 },
2184 ],
2185 },
2186 )
2187 .ok(),
2188 }],
2189 })
2190 .await
2191 .into_response()
2192 .unwrap();
2193 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2194 let file_changes = file_changes.clone();
2195 move |params, _| {
2196 let mut file_changes = file_changes.lock();
2197 file_changes.extend(params.changes);
2198 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2199 }
2200 });
2201
2202 cx.executor().run_until_parked();
2203 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2204 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2205
2206 let mut new_watched_paths = fs.watched_paths();
2207 new_watched_paths.retain(|path| {
2208 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2209 });
2210 assert_eq!(
2211 &new_watched_paths,
2212 &[
2213 Path::new(path!("/the-root")),
2214 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2215 Path::new(path!("/the/stdlib/src"))
2216 ]
2217 );
2218
2219 // Now the language server has asked us to watch an ignored directory path,
2220 // so we recursively load it.
2221 project.update(cx, |project, cx| {
2222 let worktree = project.visible_worktrees(cx).next().unwrap();
2223 assert_eq!(
2224 worktree
2225 .read(cx)
2226 .snapshot()
2227 .entries(true, 0)
2228 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2229 .collect::<Vec<_>>(),
2230 &[
2231 ("", false),
2232 (".gitignore", false),
2233 ("Cargo.lock", false),
2234 ("src", false),
2235 ("src/a.rs", false),
2236 ("src/b.rs", false),
2237 ("target", true),
2238 ("target/x", true),
2239 ("target/y", true),
2240 ("target/y/out", true),
2241 ("target/y/out/y.rs", true),
2242 ("target/z", true),
2243 ]
2244 );
2245 });
2246
2247 // Perform some file system mutations, two of which match the watched patterns,
2248 // and one of which does not.
2249 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2250 .await
2251 .unwrap();
2252 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2253 .await
2254 .unwrap();
2255 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2256 .await
2257 .unwrap();
2258 fs.create_file(
2259 path!("/the-root/target/x/out/x2.rs").as_ref(),
2260 Default::default(),
2261 )
2262 .await
2263 .unwrap();
2264 fs.create_file(
2265 path!("/the-root/target/y/out/y2.rs").as_ref(),
2266 Default::default(),
2267 )
2268 .await
2269 .unwrap();
2270 fs.save(
2271 path!("/the-root/Cargo.lock").as_ref(),
2272 &"".into(),
2273 Default::default(),
2274 )
2275 .await
2276 .unwrap();
2277 fs.save(
2278 path!("/the-stdlib/LICENSE").as_ref(),
2279 &"".into(),
2280 Default::default(),
2281 )
2282 .await
2283 .unwrap();
2284 fs.save(
2285 path!("/the/stdlib/src/string.rs").as_ref(),
2286 &"".into(),
2287 Default::default(),
2288 )
2289 .await
2290 .unwrap();
2291
2292 // The language server receives events for the FS mutations that match its watch patterns.
2293 cx.executor().run_until_parked();
2294 assert_eq!(
2295 &*file_changes.lock(),
2296 &[
2297 lsp::FileEvent {
2298 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2299 typ: lsp::FileChangeType::CHANGED,
2300 },
2301 lsp::FileEvent {
2302 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2303 typ: lsp::FileChangeType::DELETED,
2304 },
2305 lsp::FileEvent {
2306 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2307 typ: lsp::FileChangeType::CREATED,
2308 },
2309 lsp::FileEvent {
2310 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2311 typ: lsp::FileChangeType::CREATED,
2312 },
2313 lsp::FileEvent {
2314 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2315 typ: lsp::FileChangeType::CHANGED,
2316 },
2317 ]
2318 );
2319}
2320
2321#[gpui::test]
2322async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2323 init_test(cx);
2324
2325 let fs = FakeFs::new(cx.executor());
2326 fs.insert_tree(
2327 path!("/dir"),
2328 json!({
2329 "a.rs": "let a = 1;",
2330 "b.rs": "let b = 2;"
2331 }),
2332 )
2333 .await;
2334
2335 let project = Project::test(
2336 fs,
2337 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2338 cx,
2339 )
2340 .await;
2341 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2342
2343 let buffer_a = project
2344 .update(cx, |project, cx| {
2345 project.open_local_buffer(path!("/dir/a.rs"), cx)
2346 })
2347 .await
2348 .unwrap();
2349 let buffer_b = project
2350 .update(cx, |project, cx| {
2351 project.open_local_buffer(path!("/dir/b.rs"), cx)
2352 })
2353 .await
2354 .unwrap();
2355
2356 lsp_store.update(cx, |lsp_store, cx| {
2357 lsp_store
2358 .update_diagnostics(
2359 LanguageServerId(0),
2360 lsp::PublishDiagnosticsParams {
2361 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2362 version: None,
2363 diagnostics: vec![lsp::Diagnostic {
2364 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2365 severity: Some(lsp::DiagnosticSeverity::ERROR),
2366 message: "error 1".to_string(),
2367 ..Default::default()
2368 }],
2369 },
2370 None,
2371 DiagnosticSourceKind::Pushed,
2372 &[],
2373 cx,
2374 )
2375 .unwrap();
2376 lsp_store
2377 .update_diagnostics(
2378 LanguageServerId(0),
2379 lsp::PublishDiagnosticsParams {
2380 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2381 version: None,
2382 diagnostics: vec![lsp::Diagnostic {
2383 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2384 severity: Some(DiagnosticSeverity::WARNING),
2385 message: "error 2".to_string(),
2386 ..Default::default()
2387 }],
2388 },
2389 None,
2390 DiagnosticSourceKind::Pushed,
2391 &[],
2392 cx,
2393 )
2394 .unwrap();
2395 });
2396
2397 buffer_a.update(cx, |buffer, _| {
2398 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2399 assert_eq!(
2400 chunks
2401 .iter()
2402 .map(|(s, d)| (s.as_str(), *d))
2403 .collect::<Vec<_>>(),
2404 &[
2405 ("let ", None),
2406 ("a", Some(DiagnosticSeverity::ERROR)),
2407 (" = 1;", None),
2408 ]
2409 );
2410 });
2411 buffer_b.update(cx, |buffer, _| {
2412 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2413 assert_eq!(
2414 chunks
2415 .iter()
2416 .map(|(s, d)| (s.as_str(), *d))
2417 .collect::<Vec<_>>(),
2418 &[
2419 ("let ", None),
2420 ("b", Some(DiagnosticSeverity::WARNING)),
2421 (" = 2;", None),
2422 ]
2423 );
2424 });
2425}
2426
2427#[gpui::test]
2428async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2429 init_test(cx);
2430
2431 let fs = FakeFs::new(cx.executor());
2432 fs.insert_tree(
2433 path!("/root"),
2434 json!({
2435 "dir": {
2436 ".git": {
2437 "HEAD": "ref: refs/heads/main",
2438 },
2439 ".gitignore": "b.rs",
2440 "a.rs": "let a = 1;",
2441 "b.rs": "let b = 2;",
2442 },
2443 "other.rs": "let b = c;"
2444 }),
2445 )
2446 .await;
2447
2448 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2449 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2450 let (worktree, _) = project
2451 .update(cx, |project, cx| {
2452 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2453 })
2454 .await
2455 .unwrap();
2456 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2457
2458 let (worktree, _) = project
2459 .update(cx, |project, cx| {
2460 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2461 })
2462 .await
2463 .unwrap();
2464 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2465
2466 let server_id = LanguageServerId(0);
2467 lsp_store.update(cx, |lsp_store, cx| {
2468 lsp_store
2469 .update_diagnostics(
2470 server_id,
2471 lsp::PublishDiagnosticsParams {
2472 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2473 version: None,
2474 diagnostics: vec![lsp::Diagnostic {
2475 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2476 severity: Some(lsp::DiagnosticSeverity::ERROR),
2477 message: "unused variable 'b'".to_string(),
2478 ..Default::default()
2479 }],
2480 },
2481 None,
2482 DiagnosticSourceKind::Pushed,
2483 &[],
2484 cx,
2485 )
2486 .unwrap();
2487 lsp_store
2488 .update_diagnostics(
2489 server_id,
2490 lsp::PublishDiagnosticsParams {
2491 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2492 version: None,
2493 diagnostics: vec![lsp::Diagnostic {
2494 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2495 severity: Some(lsp::DiagnosticSeverity::ERROR),
2496 message: "unknown variable 'c'".to_string(),
2497 ..Default::default()
2498 }],
2499 },
2500 None,
2501 DiagnosticSourceKind::Pushed,
2502 &[],
2503 cx,
2504 )
2505 .unwrap();
2506 });
2507
2508 let main_ignored_buffer = project
2509 .update(cx, |project, cx| {
2510 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2511 })
2512 .await
2513 .unwrap();
2514 main_ignored_buffer.update(cx, |buffer, _| {
2515 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2516 assert_eq!(
2517 chunks
2518 .iter()
2519 .map(|(s, d)| (s.as_str(), *d))
2520 .collect::<Vec<_>>(),
2521 &[
2522 ("let ", None),
2523 ("b", Some(DiagnosticSeverity::ERROR)),
2524 (" = 2;", None),
2525 ],
2526 "Gigitnored buffers should still get in-buffer diagnostics",
2527 );
2528 });
2529 let other_buffer = project
2530 .update(cx, |project, cx| {
2531 project.open_buffer((other_worktree_id, rel_path("")), cx)
2532 })
2533 .await
2534 .unwrap();
2535 other_buffer.update(cx, |buffer, _| {
2536 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2537 assert_eq!(
2538 chunks
2539 .iter()
2540 .map(|(s, d)| (s.as_str(), *d))
2541 .collect::<Vec<_>>(),
2542 &[
2543 ("let b = ", None),
2544 ("c", Some(DiagnosticSeverity::ERROR)),
2545 (";", None),
2546 ],
2547 "Buffers from hidden projects should still get in-buffer diagnostics"
2548 );
2549 });
2550
2551 project.update(cx, |project, cx| {
2552 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2553 assert_eq!(
2554 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2555 vec![(
2556 ProjectPath {
2557 worktree_id: main_worktree_id,
2558 path: rel_path("b.rs").into(),
2559 },
2560 server_id,
2561 DiagnosticSummary {
2562 error_count: 1,
2563 warning_count: 0,
2564 }
2565 )]
2566 );
2567 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2568 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2569 });
2570}
2571
// End-to-end check of the disk-based diagnostics lifecycle events emitted by
// the project: DiskBasedDiagnosticsStarted when the server begins progress
// under the adapter's designated token, DiagnosticsUpdated as diagnostics are
// published, and DiskBasedDiagnosticsFinished when that progress ends. Also
// verifies that publishing empty diagnostics twice yields only one update.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Progress work reported under this token is treated as a disk-based
    // diagnostics run by the fake adapter configured below.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under "<token>/0" announces a disk-based run.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Diagnostics published mid-run surface as a DiagnosticsUpdated event for
    // the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the progress task completes the disk-based run.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is attached to the buffer at the reported range.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second identical (empty) publish must be a no-op: no further events.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2707
// Restarting a language server while its disk-based diagnostics run is still
// in flight must not wedge the project in a "diagnostics running" state: the
// old server's unfinished progress is dropped on removal, and only the
// replacement server's progress is tracked to completion.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed and the restarted one (id 1) added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The still-open buffer is re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server id is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2809
2810#[gpui::test]
2811async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2812 init_test(cx);
2813
2814 let fs = FakeFs::new(cx.executor());
2815 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2816
2817 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2818
2819 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2820 language_registry.add(rust_lang());
2821 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2822
2823 let (buffer, _) = project
2824 .update(cx, |project, cx| {
2825 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2826 })
2827 .await
2828 .unwrap();
2829
2830 // Publish diagnostics
2831 let fake_server = fake_servers.next().await.unwrap();
2832 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2833 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2834 version: None,
2835 diagnostics: vec![lsp::Diagnostic {
2836 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2837 severity: Some(lsp::DiagnosticSeverity::ERROR),
2838 message: "the message".to_string(),
2839 ..Default::default()
2840 }],
2841 });
2842
2843 cx.executor().run_until_parked();
2844 buffer.update(cx, |buffer, _| {
2845 assert_eq!(
2846 buffer
2847 .snapshot()
2848 .diagnostics_in_range::<_, usize>(0..1, false)
2849 .map(|entry| entry.diagnostic.message.clone())
2850 .collect::<Vec<_>>(),
2851 ["the message".to_string()]
2852 );
2853 });
2854 project.update(cx, |project, cx| {
2855 assert_eq!(
2856 project.diagnostic_summary(false, cx),
2857 DiagnosticSummary {
2858 error_count: 1,
2859 warning_count: 0,
2860 }
2861 );
2862 });
2863
2864 project.update(cx, |project, cx| {
2865 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2866 });
2867
2868 // The diagnostics are cleared.
2869 cx.executor().run_until_parked();
2870 buffer.update(cx, |buffer, _| {
2871 assert_eq!(
2872 buffer
2873 .snapshot()
2874 .diagnostics_in_range::<_, usize>(0..1, false)
2875 .map(|entry| entry.diagnostic.message.clone())
2876 .collect::<Vec<_>>(),
2877 Vec::<String>::new(),
2878 );
2879 });
2880 project.update(cx, |project, cx| {
2881 assert_eq!(
2882 project.diagnostic_summary(false, cx),
2883 DiagnosticSummary {
2884 error_count: 0,
2885 warning_count: 0,
2886 }
2887 );
2888 });
2889}
2890
2891#[gpui::test]
2892async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2893 init_test(cx);
2894
2895 let fs = FakeFs::new(cx.executor());
2896 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2897
2898 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2899 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2900
2901 language_registry.add(rust_lang());
2902 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2903
2904 let (buffer, _handle) = project
2905 .update(cx, |project, cx| {
2906 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2907 })
2908 .await
2909 .unwrap();
2910
2911 // Before restarting the server, report diagnostics with an unknown buffer version.
2912 let fake_server = fake_servers.next().await.unwrap();
2913 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2914 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2915 version: Some(10000),
2916 diagnostics: Vec::new(),
2917 });
2918 cx.executor().run_until_parked();
2919 project.update(cx, |project, cx| {
2920 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2921 });
2922
2923 let mut fake_server = fake_servers.next().await.unwrap();
2924 let notification = fake_server
2925 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2926 .await
2927 .text_document;
2928 assert_eq!(notification.version, 0);
2929}
2930
// Cancelling language-server work for a buffer sends WorkDoneProgressCancel
// only for progress tasks the server marked `cancellable: true`; the
// non-cancellable task started first must be left untouched.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First task: explicitly NOT cancellable — must survive the cancellation.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second task: cancellable, under the disk-based diagnostics token.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Only the cancellable task's token should be cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3000
// Flipping the per-language `enable_language_server` setting should stop the
// corresponding server (observed as an Exit notification) and start it back
// up again when re-enabled, without disturbing servers for other languages.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // One distinctly-named fake server per language, observed independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer of each language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The restarted Rust server re-opens the still-open Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3118
3119#[gpui::test(iterations = 3)]
3120async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3121 init_test(cx);
3122
3123 let text = "
3124 fn a() { A }
3125 fn b() { BB }
3126 fn c() { CCC }
3127 "
3128 .unindent();
3129
3130 let fs = FakeFs::new(cx.executor());
3131 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3132
3133 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3134 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3135
3136 language_registry.add(rust_lang());
3137 let mut fake_servers = language_registry.register_fake_lsp(
3138 "Rust",
3139 FakeLspAdapter {
3140 disk_based_diagnostics_sources: vec!["disk".into()],
3141 ..Default::default()
3142 },
3143 );
3144
3145 let buffer = project
3146 .update(cx, |project, cx| {
3147 project.open_local_buffer(path!("/dir/a.rs"), cx)
3148 })
3149 .await
3150 .unwrap();
3151
3152 let _handle = project.update(cx, |project, cx| {
3153 project.register_buffer_with_language_servers(&buffer, cx)
3154 });
3155
3156 let mut fake_server = fake_servers.next().await.unwrap();
3157 let open_notification = fake_server
3158 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3159 .await;
3160
3161 // Edit the buffer, moving the content down
3162 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3163 let change_notification_1 = fake_server
3164 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3165 .await;
3166 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3167
3168 // Report some diagnostics for the initial version of the buffer
3169 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3170 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3171 version: Some(open_notification.text_document.version),
3172 diagnostics: vec![
3173 lsp::Diagnostic {
3174 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3175 severity: Some(DiagnosticSeverity::ERROR),
3176 message: "undefined variable 'A'".to_string(),
3177 source: Some("disk".to_string()),
3178 ..Default::default()
3179 },
3180 lsp::Diagnostic {
3181 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3182 severity: Some(DiagnosticSeverity::ERROR),
3183 message: "undefined variable 'BB'".to_string(),
3184 source: Some("disk".to_string()),
3185 ..Default::default()
3186 },
3187 lsp::Diagnostic {
3188 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3189 severity: Some(DiagnosticSeverity::ERROR),
3190 source: Some("disk".to_string()),
3191 message: "undefined variable 'CCC'".to_string(),
3192 ..Default::default()
3193 },
3194 ],
3195 });
3196
3197 // The diagnostics have moved down since they were created.
3198 cx.executor().run_until_parked();
3199 buffer.update(cx, |buffer, _| {
3200 assert_eq!(
3201 buffer
3202 .snapshot()
3203 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3204 .collect::<Vec<_>>(),
3205 &[
3206 DiagnosticEntry {
3207 range: Point::new(3, 9)..Point::new(3, 11),
3208 diagnostic: Diagnostic {
3209 source: Some("disk".into()),
3210 severity: DiagnosticSeverity::ERROR,
3211 message: "undefined variable 'BB'".to_string(),
3212 is_disk_based: true,
3213 group_id: 1,
3214 is_primary: true,
3215 source_kind: DiagnosticSourceKind::Pushed,
3216 ..Diagnostic::default()
3217 },
3218 },
3219 DiagnosticEntry {
3220 range: Point::new(4, 9)..Point::new(4, 12),
3221 diagnostic: Diagnostic {
3222 source: Some("disk".into()),
3223 severity: DiagnosticSeverity::ERROR,
3224 message: "undefined variable 'CCC'".to_string(),
3225 is_disk_based: true,
3226 group_id: 2,
3227 is_primary: true,
3228 source_kind: DiagnosticSourceKind::Pushed,
3229 ..Diagnostic::default()
3230 }
3231 }
3232 ]
3233 );
3234 assert_eq!(
3235 chunks_with_diagnostics(buffer, 0..buffer.len()),
3236 [
3237 ("\n\nfn a() { ".to_string(), None),
3238 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3239 (" }\nfn b() { ".to_string(), None),
3240 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3241 (" }\nfn c() { ".to_string(), None),
3242 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3243 (" }\n".to_string(), None),
3244 ]
3245 );
3246 assert_eq!(
3247 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3248 [
3249 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3250 (" }\nfn c() { ".to_string(), None),
3251 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3252 ]
3253 );
3254 });
3255
3256 // Ensure overlapping diagnostics are highlighted correctly.
3257 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3258 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3259 version: Some(open_notification.text_document.version),
3260 diagnostics: vec![
3261 lsp::Diagnostic {
3262 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3263 severity: Some(DiagnosticSeverity::ERROR),
3264 message: "undefined variable 'A'".to_string(),
3265 source: Some("disk".to_string()),
3266 ..Default::default()
3267 },
3268 lsp::Diagnostic {
3269 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3270 severity: Some(DiagnosticSeverity::WARNING),
3271 message: "unreachable statement".to_string(),
3272 source: Some("disk".to_string()),
3273 ..Default::default()
3274 },
3275 ],
3276 });
3277
3278 cx.executor().run_until_parked();
3279 buffer.update(cx, |buffer, _| {
3280 assert_eq!(
3281 buffer
3282 .snapshot()
3283 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3284 .collect::<Vec<_>>(),
3285 &[
3286 DiagnosticEntry {
3287 range: Point::new(2, 9)..Point::new(2, 12),
3288 diagnostic: Diagnostic {
3289 source: Some("disk".into()),
3290 severity: DiagnosticSeverity::WARNING,
3291 message: "unreachable statement".to_string(),
3292 is_disk_based: true,
3293 group_id: 4,
3294 is_primary: true,
3295 source_kind: DiagnosticSourceKind::Pushed,
3296 ..Diagnostic::default()
3297 }
3298 },
3299 DiagnosticEntry {
3300 range: Point::new(2, 9)..Point::new(2, 10),
3301 diagnostic: Diagnostic {
3302 source: Some("disk".into()),
3303 severity: DiagnosticSeverity::ERROR,
3304 message: "undefined variable 'A'".to_string(),
3305 is_disk_based: true,
3306 group_id: 3,
3307 is_primary: true,
3308 source_kind: DiagnosticSourceKind::Pushed,
3309 ..Diagnostic::default()
3310 },
3311 }
3312 ]
3313 );
3314 assert_eq!(
3315 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3316 [
3317 ("fn a() { ".to_string(), None),
3318 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3319 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3320 ("\n".to_string(), None),
3321 ]
3322 );
3323 assert_eq!(
3324 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3325 [
3326 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3327 ("\n".to_string(), None),
3328 ]
3329 );
3330 });
3331
3332 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3333 // changes since the last save.
3334 buffer.update(cx, |buffer, cx| {
3335 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3336 buffer.edit(
3337 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3338 None,
3339 cx,
3340 );
3341 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3342 });
3343 let change_notification_2 = fake_server
3344 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3345 .await;
3346 assert!(
3347 change_notification_2.text_document.version > change_notification_1.text_document.version
3348 );
3349
3350 // Handle out-of-order diagnostics
3351 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3352 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3353 version: Some(change_notification_2.text_document.version),
3354 diagnostics: vec![
3355 lsp::Diagnostic {
3356 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3357 severity: Some(DiagnosticSeverity::ERROR),
3358 message: "undefined variable 'BB'".to_string(),
3359 source: Some("disk".to_string()),
3360 ..Default::default()
3361 },
3362 lsp::Diagnostic {
3363 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3364 severity: Some(DiagnosticSeverity::WARNING),
3365 message: "undefined variable 'A'".to_string(),
3366 source: Some("disk".to_string()),
3367 ..Default::default()
3368 },
3369 ],
3370 });
3371
3372 cx.executor().run_until_parked();
3373 buffer.update(cx, |buffer, _| {
3374 assert_eq!(
3375 buffer
3376 .snapshot()
3377 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3378 .collect::<Vec<_>>(),
3379 &[
3380 DiagnosticEntry {
3381 range: Point::new(2, 21)..Point::new(2, 22),
3382 diagnostic: Diagnostic {
3383 source: Some("disk".into()),
3384 severity: DiagnosticSeverity::WARNING,
3385 message: "undefined variable 'A'".to_string(),
3386 is_disk_based: true,
3387 group_id: 6,
3388 is_primary: true,
3389 source_kind: DiagnosticSourceKind::Pushed,
3390 ..Diagnostic::default()
3391 }
3392 },
3393 DiagnosticEntry {
3394 range: Point::new(3, 9)..Point::new(3, 14),
3395 diagnostic: Diagnostic {
3396 source: Some("disk".into()),
3397 severity: DiagnosticSeverity::ERROR,
3398 message: "undefined variable 'BB'".to_string(),
3399 is_disk_based: true,
3400 group_id: 5,
3401 is_primary: true,
3402 source_kind: DiagnosticSourceKind::Pushed,
3403 ..Diagnostic::default()
3404 },
3405 }
3406 ]
3407 );
3408 });
3409}
3410
3411#[gpui::test]
3412async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
3413 init_test(cx);
3414
3415 let text = concat!(
3416 "let one = ;\n", //
3417 "let two = \n",
3418 "let three = 3;\n",
3419 );
3420
3421 let fs = FakeFs::new(cx.executor());
3422 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3423
3424 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3425 let buffer = project
3426 .update(cx, |project, cx| {
3427 project.open_local_buffer(path!("/dir/a.rs"), cx)
3428 })
3429 .await
3430 .unwrap();
3431
3432 project.update(cx, |project, cx| {
3433 project.lsp_store().update(cx, |lsp_store, cx| {
3434 lsp_store
3435 .update_diagnostic_entries(
3436 LanguageServerId(0),
3437 PathBuf::from(path!("/dir/a.rs")),
3438 None,
3439 None,
3440 vec![
3441 DiagnosticEntry {
3442 range: Unclipped(PointUtf16::new(0, 10))
3443 ..Unclipped(PointUtf16::new(0, 10)),
3444 diagnostic: Diagnostic {
3445 severity: DiagnosticSeverity::ERROR,
3446 message: "syntax error 1".to_string(),
3447 source_kind: DiagnosticSourceKind::Pushed,
3448 ..Diagnostic::default()
3449 },
3450 },
3451 DiagnosticEntry {
3452 range: Unclipped(PointUtf16::new(1, 10))
3453 ..Unclipped(PointUtf16::new(1, 10)),
3454 diagnostic: Diagnostic {
3455 severity: DiagnosticSeverity::ERROR,
3456 message: "syntax error 2".to_string(),
3457 source_kind: DiagnosticSourceKind::Pushed,
3458 ..Diagnostic::default()
3459 },
3460 },
3461 ],
3462 cx,
3463 )
3464 .unwrap();
3465 })
3466 });
3467
3468 // An empty range is extended forward to include the following character.
3469 // At the end of a line, an empty range is extended backward to include
3470 // the preceding character.
3471 buffer.update(cx, |buffer, _| {
3472 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
3473 assert_eq!(
3474 chunks
3475 .iter()
3476 .map(|(s, d)| (s.as_str(), *d))
3477 .collect::<Vec<_>>(),
3478 &[
3479 ("let one = ", None),
3480 (";", Some(DiagnosticSeverity::ERROR)),
3481 ("\nlet two =", None),
3482 (" ", Some(DiagnosticSeverity::ERROR)),
3483 ("\nlet three = 3;\n", None)
3484 ]
3485 );
3486 });
3487}
3488
3489#[gpui::test]
3490async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3491 init_test(cx);
3492
3493 let fs = FakeFs::new(cx.executor());
3494 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3495 .await;
3496
3497 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3498 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3499
3500 lsp_store.update(cx, |lsp_store, cx| {
3501 lsp_store
3502 .update_diagnostic_entries(
3503 LanguageServerId(0),
3504 Path::new(path!("/dir/a.rs")).to_owned(),
3505 None,
3506 None,
3507 vec![DiagnosticEntry {
3508 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3509 diagnostic: Diagnostic {
3510 severity: DiagnosticSeverity::ERROR,
3511 is_primary: true,
3512 message: "syntax error a1".to_string(),
3513 source_kind: DiagnosticSourceKind::Pushed,
3514 ..Diagnostic::default()
3515 },
3516 }],
3517 cx,
3518 )
3519 .unwrap();
3520 lsp_store
3521 .update_diagnostic_entries(
3522 LanguageServerId(1),
3523 Path::new(path!("/dir/a.rs")).to_owned(),
3524 None,
3525 None,
3526 vec![DiagnosticEntry {
3527 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3528 diagnostic: Diagnostic {
3529 severity: DiagnosticSeverity::ERROR,
3530 is_primary: true,
3531 message: "syntax error b1".to_string(),
3532 source_kind: DiagnosticSourceKind::Pushed,
3533 ..Diagnostic::default()
3534 },
3535 }],
3536 cx,
3537 )
3538 .unwrap();
3539
3540 assert_eq!(
3541 lsp_store.diagnostic_summary(false, cx),
3542 DiagnosticSummary {
3543 error_count: 2,
3544 warning_count: 0,
3545 }
3546 );
3547 });
3548}
3549
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // Checks that edits computed by a language server against an *older*
    // document version are translated through the buffer's subsequent local
    // edits before being applied.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw in `didOpen`; the edits
    // below will be tagged with this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Resolve the server's edits. Their coordinates refer to the document as
    // of `lsp_document_version`, before the local edits above were made.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits changes the code the server targeted while
    // leaving the comments inserted locally in the meantime intact.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3704
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // Checks that when a server expresses a small change as a very large diff,
    // the resolved edits shrink to the minimal set of real changes.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The reinsert-then-delete dance above nets out to just two edits:
        // the use-list replacement, and one deleted line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3815
3816#[gpui::test]
3817async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
3818 cx: &mut gpui::TestAppContext,
3819) {
3820 init_test(cx);
3821
3822 let text = "Path()";
3823
3824 let fs = FakeFs::new(cx.executor());
3825 fs.insert_tree(
3826 path!("/dir"),
3827 json!({
3828 "a.rs": text
3829 }),
3830 )
3831 .await;
3832
3833 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3834 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3835 let buffer = project
3836 .update(cx, |project, cx| {
3837 project.open_local_buffer(path!("/dir/a.rs"), cx)
3838 })
3839 .await
3840 .unwrap();
3841
3842 // Simulate the language server sending us a pair of edits at the same location,
3843 // with an insertion following a replacement (which violates the LSP spec).
3844 let edits = lsp_store
3845 .update(cx, |lsp_store, cx| {
3846 lsp_store.as_local_mut().unwrap().edits_from_lsp(
3847 &buffer,
3848 [
3849 lsp::TextEdit {
3850 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
3851 new_text: "Path".into(),
3852 },
3853 lsp::TextEdit {
3854 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3855 new_text: "from path import Path\n\n\n".into(),
3856 },
3857 ],
3858 LanguageServerId(0),
3859 None,
3860 cx,
3861 )
3862 })
3863 .await
3864 .unwrap();
3865
3866 buffer.update(cx, |buffer, cx| {
3867 buffer.edit(edits, None, cx);
3868 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
3869 });
3870}
3871
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    // Checks that `edits_from_lsp` tolerates malformed server edits: ranges
    // given out of order, an inverted range (end before start), and a range
    // extending past the end of the file.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: the end (0, 4) precedes the start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Line 99 is far past the end of the buffer.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the resolved edits collapse to the two
        // real changes: the use-list replacement and one deleted line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3978
3979fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3980 buffer: &Buffer,
3981 range: Range<T>,
3982) -> Vec<(String, Option<DiagnosticSeverity>)> {
3983 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3984 for chunk in buffer.snapshot().chunks(range, true) {
3985 if chunks
3986 .last()
3987 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3988 {
3989 chunks.last_mut().unwrap().0.push_str(chunk.text);
3990 } else {
3991 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3992 }
3993 }
3994 chunks
3995}
3996
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Checks go-to-definition across files: resolving a definition in a file
    // outside the project's worktree opens it in an invisible worktree, which
    // is released once the last reference to the definition is dropped.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` exists on disk but is not
    // in any worktree yet.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server reports the definition as living in `a.rs`.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // Resolving the definition added `a.rs` as a second, invisible
        // (`false`) worktree alongside the visible `b.rs` one.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the last reference to the definition releases the invisible
    // worktree for `a.rs`.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path along with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4097
4098#[gpui::test]
4099async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
4100 init_test(cx);
4101
4102 let fs = FakeFs::new(cx.executor());
4103 fs.insert_tree(
4104 path!("/dir"),
4105 json!({
4106 "a.ts": "",
4107 }),
4108 )
4109 .await;
4110
4111 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4112
4113 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4114 language_registry.add(typescript_lang());
4115 let mut fake_language_servers = language_registry.register_fake_lsp(
4116 "TypeScript",
4117 FakeLspAdapter {
4118 capabilities: lsp::ServerCapabilities {
4119 completion_provider: Some(lsp::CompletionOptions {
4120 trigger_characters: Some(vec![".".to_string()]),
4121 ..Default::default()
4122 }),
4123 ..Default::default()
4124 },
4125 ..Default::default()
4126 },
4127 );
4128
4129 let (buffer, _handle) = project
4130 .update(cx, |p, cx| {
4131 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
4132 })
4133 .await
4134 .unwrap();
4135
4136 let fake_server = fake_language_servers.next().await.unwrap();
4137 cx.executor().run_until_parked();
4138
4139 // When text_edit exists, it takes precedence over insert_text and label
4140 let text = "let a = obj.fqn";
4141 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
4142 let completions = project.update(cx, |project, cx| {
4143 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
4144 });
4145
4146 fake_server
4147 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
4148 Ok(Some(lsp::CompletionResponse::Array(vec![
4149 lsp::CompletionItem {
4150 label: "labelText".into(),
4151 insert_text: Some("insertText".into()),
4152 text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
4153 range: lsp::Range::new(
4154 lsp::Position::new(0, text.len() as u32 - 3),
4155 lsp::Position::new(0, text.len() as u32),
4156 ),
4157 new_text: "textEditText".into(),
4158 })),
4159 ..Default::default()
4160 },
4161 ])))
4162 })
4163 .next()
4164 .await;
4165
4166 let completions = completions
4167 .await
4168 .unwrap()
4169 .into_iter()
4170 .flat_map(|response| response.completions)
4171 .collect::<Vec<_>>();
4172 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
4173
4174 assert_eq!(completions.len(), 1);
4175 assert_eq!(completions[0].new_text, "textEditText");
4176 assert_eq!(
4177 completions[0].replace_range.to_offset(&snapshot),
4178 text.len() - 3..text.len()
4179 );
4180}
4181
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Checks that when a completion item has no `text_edit`, the default
    // `edit_range` from the response's `item_defaults` is used, with the text
    // coming from `text_edit_text` if present, or from the `label` otherwise.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covering the last 3 characters ("fqn").
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // The item's `text_edit_text` is inserted over the default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit range present, `insert_text` is ignored and the
        // `label` is inserted over that range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4319
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Checks completion behavior when the server supplies neither a `text_edit`
    // on the item nor a default `edit_range` in the response: the inserted text
    // falls back to `insert_text`, then to `label`, and the replace range is
    // inferred from the text around the cursor (asserted below).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // `insert_text` is used verbatim, and the replace range covers the
    // 3-character word ("fqn") immediately before the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        // Cursor sits just inside the closing quote, after "cmp".
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // With no insert text either, the `label` is inserted, replacing the
    // 3-character run ("cmp") before the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4426
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    // Regression test: completion insert text received from a language server
    // that contains `\r` or `\r\n` must be normalized to `\n` before it is
    // surfaced as the completion's `new_text`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Request completions at the end of the buffer; the handler below serves
    // a single item whose insert text mixes `\r` and `\r\n` line endings.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    // Both carriage-return variants were normalized to plain `\n`.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4495
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // Full round-trip for a code action that carries a command instead of
    // edits: resolving the action yields the command, executing the command
    // makes the server send `workspace/applyEdit` back to the client, and
    // those edits end up in the project transaction returned by
    // `apply_code_action`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first (command-carrying) action.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: prepend "X" at the start of the file.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
4638
4639#[gpui::test]
4640async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
4641 init_test(cx);
4642 let fs = FakeFs::new(cx.background_executor.clone());
4643 let expected_contents = "content";
4644 fs.as_fake()
4645 .insert_tree(
4646 "/root",
4647 json!({
4648 "test.txt": expected_contents
4649 }),
4650 )
4651 .await;
4652
4653 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
4654
4655 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
4656 let worktree = project.worktrees(cx).next().unwrap();
4657 let entry_id = worktree
4658 .read(cx)
4659 .entry_for_path(rel_path("test.txt"))
4660 .unwrap()
4661 .id;
4662 (worktree, entry_id)
4663 });
4664 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4665 let _result = project
4666 .update(cx, |project, cx| {
4667 project.rename_entry(
4668 entry_id,
4669 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
4670 cx,
4671 )
4672 })
4673 .await
4674 .unwrap();
4675 worktree.read_with(cx, |worktree, _| {
4676 assert!(
4677 worktree.entry_for_path(rel_path("test.txt")).is_none(),
4678 "Old file should have been removed"
4679 );
4680 assert!(
4681 worktree
4682 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4683 .is_some(),
4684 "Whole directory hierarchy and the new file should have been created"
4685 );
4686 });
4687 assert_eq!(
4688 worktree
4689 .update(cx, |worktree, cx| {
4690 worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
4691 })
4692 .await
4693 .unwrap()
4694 .text,
4695 expected_contents,
4696 "Moved file's contents should be preserved"
4697 );
4698
4699 let entry_id = worktree.read_with(cx, |worktree, _| {
4700 worktree
4701 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4702 .unwrap()
4703 .id
4704 });
4705
4706 let _result = project
4707 .update(cx, |project, cx| {
4708 project.rename_entry(
4709 entry_id,
4710 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
4711 cx,
4712 )
4713 })
4714 .await
4715 .unwrap();
4716 worktree.read_with(cx, |worktree, _| {
4717 assert!(
4718 worktree.entry_for_path(rel_path("test.txt")).is_none(),
4719 "First file should not reappear"
4720 );
4721 assert!(
4722 worktree
4723 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4724 .is_none(),
4725 "Old file should have been removed"
4726 );
4727 assert!(
4728 worktree
4729 .entry_for_path(rel_path("dir1/dir2/test.txt"))
4730 .is_some(),
4731 "No error should have occurred after moving into existing directory"
4732 );
4733 });
4734 assert_eq!(
4735 worktree
4736 .update(cx, |worktree, cx| {
4737 worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
4738 })
4739 .await
4740 .unwrap()
4741 .text,
4742 expected_contents,
4743 "Moved file's contents should be preserved"
4744 );
4745}
4746
4747#[gpui::test(iterations = 10)]
4748async fn test_save_file(cx: &mut gpui::TestAppContext) {
4749 init_test(cx);
4750
4751 let fs = FakeFs::new(cx.executor());
4752 fs.insert_tree(
4753 path!("/dir"),
4754 json!({
4755 "file1": "the old contents",
4756 }),
4757 )
4758 .await;
4759
4760 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4761 let buffer = project
4762 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4763 .await
4764 .unwrap();
4765 buffer.update(cx, |buffer, cx| {
4766 assert_eq!(buffer.text(), "the old contents");
4767 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4768 });
4769
4770 project
4771 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4772 .await
4773 .unwrap();
4774
4775 let new_text = fs
4776 .load(Path::new(path!("/dir/file1")))
4777 .await
4778 .unwrap()
4779 .replace("\r\n", "\n");
4780 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4781}
4782
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Saving an untitled buffer under a path with a recognized extension must
    // start the matching language server and register the buffer with it.
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer has no path, so no language server applies yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as a `.rs` file inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the freshly started server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4862
4863#[gpui::test(iterations = 30)]
4864async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4865 init_test(cx);
4866
4867 let fs = FakeFs::new(cx.executor());
4868 fs.insert_tree(
4869 path!("/dir"),
4870 json!({
4871 "file1": "the original contents",
4872 }),
4873 )
4874 .await;
4875
4876 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4877 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4878 let buffer = project
4879 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4880 .await
4881 .unwrap();
4882
4883 // Change the buffer's file on disk, and then wait for the file change
4884 // to be detected by the worktree, so that the buffer starts reloading.
4885 fs.save(
4886 path!("/dir/file1").as_ref(),
4887 &"the first contents".into(),
4888 Default::default(),
4889 )
4890 .await
4891 .unwrap();
4892 worktree.next_event(cx).await;
4893
4894 // Change the buffer's file again. Depending on the random seed, the
4895 // previous file change may still be in progress.
4896 fs.save(
4897 path!("/dir/file1").as_ref(),
4898 &"the second contents".into(),
4899 Default::default(),
4900 )
4901 .await
4902 .unwrap();
4903 worktree.next_event(cx).await;
4904
4905 cx.executor().run_until_parked();
4906 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4907 buffer.read_with(cx, |buffer, _| {
4908 assert_eq!(buffer.text(), on_disk_text);
4909 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4910 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4911 });
4912}
4913
4914#[gpui::test(iterations = 30)]
4915async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4916 init_test(cx);
4917
4918 let fs = FakeFs::new(cx.executor());
4919 fs.insert_tree(
4920 path!("/dir"),
4921 json!({
4922 "file1": "the original contents",
4923 }),
4924 )
4925 .await;
4926
4927 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4928 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4929 let buffer = project
4930 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4931 .await
4932 .unwrap();
4933
4934 // Change the buffer's file on disk, and then wait for the file change
4935 // to be detected by the worktree, so that the buffer starts reloading.
4936 fs.save(
4937 path!("/dir/file1").as_ref(),
4938 &"the first contents".into(),
4939 Default::default(),
4940 )
4941 .await
4942 .unwrap();
4943 worktree.next_event(cx).await;
4944
4945 cx.executor()
4946 .spawn(cx.executor().simulate_random_delay())
4947 .await;
4948
4949 // Perform a noop edit, causing the buffer's version to increase.
4950 buffer.update(cx, |buffer, cx| {
4951 buffer.edit([(0..0, " ")], None, cx);
4952 buffer.undo(cx);
4953 });
4954
4955 cx.executor().run_until_parked();
4956 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4957 buffer.read_with(cx, |buffer, _| {
4958 let buffer_text = buffer.text();
4959 if buffer_text == on_disk_text {
4960 assert!(
4961 !buffer.is_dirty() && !buffer.has_conflict(),
4962 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4963 );
4964 }
4965 // If the file change occurred while the buffer was processing the first
4966 // change, the buffer will be in a conflicting state.
4967 else {
4968 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4969 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4970 }
4971 });
4972}
4973
4974#[gpui::test]
4975async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4976 init_test(cx);
4977
4978 let fs = FakeFs::new(cx.executor());
4979 fs.insert_tree(
4980 path!("/dir"),
4981 json!({
4982 "file1": "the old contents",
4983 }),
4984 )
4985 .await;
4986
4987 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4988 let buffer = project
4989 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4990 .await
4991 .unwrap();
4992 buffer.update(cx, |buffer, cx| {
4993 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4994 });
4995
4996 project
4997 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4998 .await
4999 .unwrap();
5000
5001 let new_text = fs
5002 .load(Path::new(path!("/dir/file1")))
5003 .await
5004 .unwrap()
5005 .replace("\r\n", "\n");
5006 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5007}
5008
5009#[gpui::test]
5010async fn test_save_as(cx: &mut gpui::TestAppContext) {
5011 init_test(cx);
5012
5013 let fs = FakeFs::new(cx.executor());
5014 fs.insert_tree("/dir", json!({})).await;
5015
5016 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5017
5018 let languages = project.update(cx, |project, _| project.languages().clone());
5019 languages.add(rust_lang());
5020
5021 let buffer = project.update(cx, |project, cx| {
5022 project.create_local_buffer("", None, false, cx)
5023 });
5024 buffer.update(cx, |buffer, cx| {
5025 buffer.edit([(0..0, "abc")], None, cx);
5026 assert!(buffer.is_dirty());
5027 assert!(!buffer.has_conflict());
5028 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
5029 });
5030 project
5031 .update(cx, |project, cx| {
5032 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5033 let path = ProjectPath {
5034 worktree_id,
5035 path: rel_path("file1.rs").into(),
5036 };
5037 project.save_buffer_as(buffer.clone(), path, cx)
5038 })
5039 .await
5040 .unwrap();
5041 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
5042
5043 cx.executor().run_until_parked();
5044 buffer.update(cx, |buffer, cx| {
5045 assert_eq!(
5046 buffer.file().unwrap().full_path(cx),
5047 Path::new("dir/file1.rs")
5048 );
5049 assert!(!buffer.is_dirty());
5050 assert!(!buffer.has_conflict());
5051 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
5052 });
5053
5054 let opened_buffer = project
5055 .update(cx, |project, cx| {
5056 project.open_local_buffer("/dir/file1.rs", cx)
5057 })
5058 .await
5059 .unwrap();
5060 assert_eq!(opened_buffer, buffer);
5061}
5062
5063#[gpui::test]
5064async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
5065 init_test(cx);
5066
5067 let fs = FakeFs::new(cx.executor());
5068 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5069
5070 fs.insert_tree(
5071 path!("/dir"),
5072 json!({
5073 "data_a.txt": "data about a"
5074 }),
5075 )
5076 .await;
5077
5078 let buffer = project
5079 .update(cx, |project, cx| {
5080 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5081 })
5082 .await
5083 .unwrap();
5084
5085 buffer.update(cx, |buffer, cx| {
5086 buffer.edit([(11..12, "b")], None, cx);
5087 });
5088
5089 // Save buffer's contents as a new file and confirm that the buffer's now
5090 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
5091 // file associated with the buffer has now been updated to `data_b.txt`
5092 project
5093 .update(cx, |project, cx| {
5094 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5095 let new_path = ProjectPath {
5096 worktree_id,
5097 path: rel_path("data_b.txt").into(),
5098 };
5099
5100 project.save_buffer_as(buffer.clone(), new_path, cx)
5101 })
5102 .await
5103 .unwrap();
5104
5105 buffer.update(cx, |buffer, cx| {
5106 assert_eq!(
5107 buffer.file().unwrap().full_path(cx),
5108 Path::new("dir/data_b.txt")
5109 )
5110 });
5111
5112 // Open the original `data_a.txt` file, confirming that its contents are
5113 // unchanged and the resulting buffer's associated file is `data_a.txt`.
5114 let original_buffer = project
5115 .update(cx, |project, cx| {
5116 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5117 })
5118 .await
5119 .unwrap();
5120
5121 original_buffer.update(cx, |buffer, cx| {
5122 assert_eq!(buffer.text(), "data about a");
5123 assert_eq!(
5124 buffer.file().unwrap().full_path(cx),
5125 Path::new("dir/data_a.txt")
5126 )
5127 });
5128}
5129
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Renames/deletions on the real filesystem must preserve entry ids and
    // open buffers' file associations locally, and the streamed worktree
    // updates must bring a remote replica to the same state.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a worktree-relative path.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: resolve a worktree-relative path to its entry id.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits so it can be replayed
    // against the remote copy below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including renames of ancestor directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files across renames; the deleted file's
    // buffer keeps its last-known path but reports `DiskState::Deleted`.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5297
5298#[gpui::test(iterations = 10)]
5299async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
5300 init_test(cx);
5301
5302 let fs = FakeFs::new(cx.executor());
5303 fs.insert_tree(
5304 path!("/dir"),
5305 json!({
5306 "a": {
5307 "file1": "",
5308 }
5309 }),
5310 )
5311 .await;
5312
5313 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
5314 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5315 let tree_id = tree.update(cx, |tree, _| tree.id());
5316
5317 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5318 project.update(cx, |project, cx| {
5319 let tree = project.worktrees(cx).next().unwrap();
5320 tree.read(cx)
5321 .entry_for_path(rel_path(path))
5322 .unwrap_or_else(|| panic!("no entry for path {}", path))
5323 .id
5324 })
5325 };
5326
5327 let dir_id = id_for_path("a", cx);
5328 let file_id = id_for_path("a/file1", cx);
5329 let buffer = project
5330 .update(cx, |p, cx| {
5331 p.open_buffer((tree_id, rel_path("a/file1")), cx)
5332 })
5333 .await
5334 .unwrap();
5335 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5336
5337 project
5338 .update(cx, |project, cx| {
5339 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
5340 })
5341 .unwrap()
5342 .await
5343 .into_included()
5344 .unwrap();
5345 cx.executor().run_until_parked();
5346
5347 assert_eq!(id_for_path("b", cx), dir_id);
5348 assert_eq!(id_for_path("b/file1", cx), file_id);
5349 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5350}
5351
5352#[gpui::test]
5353async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5354 init_test(cx);
5355
5356 let fs = FakeFs::new(cx.executor());
5357 fs.insert_tree(
5358 "/dir",
5359 json!({
5360 "a.txt": "a-contents",
5361 "b.txt": "b-contents",
5362 }),
5363 )
5364 .await;
5365
5366 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5367
5368 // Spawn multiple tasks to open paths, repeating some paths.
5369 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5370 (
5371 p.open_local_buffer("/dir/a.txt", cx),
5372 p.open_local_buffer("/dir/b.txt", cx),
5373 p.open_local_buffer("/dir/a.txt", cx),
5374 )
5375 });
5376
5377 let buffer_a_1 = buffer_a_1.await.unwrap();
5378 let buffer_a_2 = buffer_a_2.await.unwrap();
5379 let buffer_b = buffer_b.await.unwrap();
5380 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5381 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5382
5383 // There is only one buffer per path.
5384 let buffer_a_id = buffer_a_1.entity_id();
5385 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5386
5387 // Open the same path again while it is still open.
5388 drop(buffer_a_1);
5389 let buffer_a_3 = project
5390 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5391 .await
5392 .unwrap();
5393
5394 // There's still only one buffer per path.
5395 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5396}
5397
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Exercises dirty/conflict state transitions and the exact event
    // sequences buffers emit: edit -> save -> edit -> revert-by-editing,
    // plus on-disk deletion of clean and dirty buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collected buffer events (Operation events are intentionally ignored).
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
5579
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Covers how an open buffer responds to its backing file changing on disk:
    // a clean buffer is reloaded in place (preserving anchors across the diff),
    // while a dirty buffer keeps its edits and is marked as conflicted.
    init_test(cx);

    // The `ˇ` markers yield offsets that are turned into anchors below, so we
    // can check that they survive the on-disk reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create one anchor per marked offset in the initial text.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line. The buffer starts out clean and conflict-free.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors track through the reload diff and land on the marked
        // positions of the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer so that it becomes dirty.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
5662
5663#[gpui::test]
5664async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
5665 init_test(cx);
5666
5667 let fs = FakeFs::new(cx.executor());
5668 fs.insert_tree(
5669 path!("/dir"),
5670 json!({
5671 "file1": "a\nb\nc\n",
5672 "file2": "one\r\ntwo\r\nthree\r\n",
5673 }),
5674 )
5675 .await;
5676
5677 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5678 let buffer1 = project
5679 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5680 .await
5681 .unwrap();
5682 let buffer2 = project
5683 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5684 .await
5685 .unwrap();
5686
5687 buffer1.update(cx, |buffer, _| {
5688 assert_eq!(buffer.text(), "a\nb\nc\n");
5689 assert_eq!(buffer.line_ending(), LineEnding::Unix);
5690 });
5691 buffer2.update(cx, |buffer, _| {
5692 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
5693 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5694 });
5695
5696 // Change a file's line endings on disk from unix to windows. The buffer's
5697 // state updates correctly.
5698 fs.save(
5699 path!("/dir/file1").as_ref(),
5700 &"aaa\nb\nc\n".into(),
5701 LineEnding::Windows,
5702 )
5703 .await
5704 .unwrap();
5705 cx.executor().run_until_parked();
5706 buffer1.update(cx, |buffer, _| {
5707 assert_eq!(buffer.text(), "aaa\nb\nc\n");
5708 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5709 });
5710
5711 // Save a file with windows line endings. The file is written correctly.
5712 buffer2.update(cx, |buffer, cx| {
5713 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
5714 });
5715 project
5716 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
5717 .await
5718 .unwrap();
5719 assert_eq!(
5720 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
5721 "one\r\ntwo\r\nthree\r\nfour\r\n",
5722 );
5723}
5724
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics linked via `related_information`
    // are grouped: supporting hints join the group of their primary
    // diagnostic, and `diagnostic_group` returns each group's members.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two logical groups:
    // - "error 1" (warning) with one supporting hint, both at the same range;
    // - "error 2" (error) with two supporting hints at a different range.
    // Hints point back at their primary via related_information, and the
    // primaries point at their hints.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the pushed diagnostics into the LSP store as server 0.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All five entries are present, ordered by position. The "error 2" group
    // is assigned group_id 0 and the "error 1" group gets group_id 1; in each
    // group exactly one entry is primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 contains "error 2" plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 contains "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5984
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the file-operation protocol around renaming a worktree entry:
    // the server's registered willRename/didRename filters cause the project
    // to send `workspace/willRenameFiles` (whose returned WorkspaceEdit is
    // resolved) and a `workspace/didRenameFiles` notification afterwards.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server registers interest in *.rs files and all folders for
    // rename operations.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename of one.rs -> three.rs; the returned task is awaited
    // only after the willRenameFiles handler below is installed.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will answer willRenameFiles with; the project is
    // expected to resolve/apply it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit handed back to the project, so we can assert at the
    // end that the request round-trip actually happened.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // willRenameFiles carries the old and new URIs before the
                    // rename is applied on disk.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives didRenameFiles with the
    // same URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6121
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end LSP symbol rename: `prepare_rename` resolves the symbol's
    // range, then `perform_rename` applies a multi-file WorkspaceEdit
    // returned by the server to the affected buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server advertises rename support with prepare_provider, so the
    // project issues textDocument/prepareRename before renaming.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Prepare a rename at offset 7 (inside "ONE"); the handler is installed
    // after the request task is created but before it is awaited.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The server's (0,6)..(0,9) range maps to byte offsets 6..9 of "ONE".
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename to "THREE"; the server answers with edits touching
    // both one.rs (the definition) and two.rs (two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both edited buffers, each reflecting
    // the applied rename.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6262
6263#[gpui::test]
6264async fn test_search(cx: &mut gpui::TestAppContext) {
6265 init_test(cx);
6266
6267 let fs = FakeFs::new(cx.executor());
6268 fs.insert_tree(
6269 path!("/dir"),
6270 json!({
6271 "one.rs": "const ONE: usize = 1;",
6272 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6273 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6274 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6275 }),
6276 )
6277 .await;
6278 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6279 assert_eq!(
6280 search(
6281 &project,
6282 SearchQuery::text(
6283 "TWO",
6284 false,
6285 true,
6286 false,
6287 Default::default(),
6288 Default::default(),
6289 false,
6290 None
6291 )
6292 .unwrap(),
6293 cx
6294 )
6295 .await
6296 .unwrap(),
6297 HashMap::from_iter([
6298 (path!("dir/two.rs").to_string(), vec![6..9]),
6299 (path!("dir/three.rs").to_string(), vec![37..40])
6300 ])
6301 );
6302
6303 let buffer_4 = project
6304 .update(cx, |project, cx| {
6305 project.open_local_buffer(path!("/dir/four.rs"), cx)
6306 })
6307 .await
6308 .unwrap();
6309 buffer_4.update(cx, |buffer, cx| {
6310 let text = "two::TWO";
6311 buffer.edit([(20..28, text), (31..43, text)], None, cx);
6312 });
6313
6314 assert_eq!(
6315 search(
6316 &project,
6317 SearchQuery::text(
6318 "TWO",
6319 false,
6320 true,
6321 false,
6322 Default::default(),
6323 Default::default(),
6324 false,
6325 None,
6326 )
6327 .unwrap(),
6328 cx
6329 )
6330 .await
6331 .unwrap(),
6332 HashMap::from_iter([
6333 (path!("dir/two.rs").to_string(), vec![6..9]),
6334 (path!("dir/three.rs").to_string(), vec![37..40]),
6335 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
6336 ])
6337 );
6338}
6339
6340#[gpui::test]
6341async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
6342 init_test(cx);
6343
6344 let search_query = "file";
6345
6346 let fs = FakeFs::new(cx.executor());
6347 fs.insert_tree(
6348 path!("/dir"),
6349 json!({
6350 "one.rs": r#"// Rust file one"#,
6351 "one.ts": r#"// TypeScript file one"#,
6352 "two.rs": r#"// Rust file two"#,
6353 "two.ts": r#"// TypeScript file two"#,
6354 }),
6355 )
6356 .await;
6357 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6358
6359 assert!(
6360 search(
6361 &project,
6362 SearchQuery::text(
6363 search_query,
6364 false,
6365 true,
6366 false,
6367 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6368 Default::default(),
6369 false,
6370 None
6371 )
6372 .unwrap(),
6373 cx
6374 )
6375 .await
6376 .unwrap()
6377 .is_empty(),
6378 "If no inclusions match, no files should be returned"
6379 );
6380
6381 assert_eq!(
6382 search(
6383 &project,
6384 SearchQuery::text(
6385 search_query,
6386 false,
6387 true,
6388 false,
6389 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6390 Default::default(),
6391 false,
6392 None
6393 )
6394 .unwrap(),
6395 cx
6396 )
6397 .await
6398 .unwrap(),
6399 HashMap::from_iter([
6400 (path!("dir/one.rs").to_string(), vec![8..12]),
6401 (path!("dir/two.rs").to_string(), vec![8..12]),
6402 ]),
6403 "Rust only search should give only Rust files"
6404 );
6405
6406 assert_eq!(
6407 search(
6408 &project,
6409 SearchQuery::text(
6410 search_query,
6411 false,
6412 true,
6413 false,
6414 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6415 .unwrap(),
6416 Default::default(),
6417 false,
6418 None,
6419 )
6420 .unwrap(),
6421 cx
6422 )
6423 .await
6424 .unwrap(),
6425 HashMap::from_iter([
6426 (path!("dir/one.ts").to_string(), vec![14..18]),
6427 (path!("dir/two.ts").to_string(), vec![14..18]),
6428 ]),
6429 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
6430 );
6431
6432 assert_eq!(
6433 search(
6434 &project,
6435 SearchQuery::text(
6436 search_query,
6437 false,
6438 true,
6439 false,
6440 PathMatcher::new(
6441 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6442 PathStyle::local()
6443 )
6444 .unwrap(),
6445 Default::default(),
6446 false,
6447 None,
6448 )
6449 .unwrap(),
6450 cx
6451 )
6452 .await
6453 .unwrap(),
6454 HashMap::from_iter([
6455 (path!("dir/two.ts").to_string(), vec![14..18]),
6456 (path!("dir/one.rs").to_string(), vec![8..12]),
6457 (path!("dir/one.ts").to_string(), vec![14..18]),
6458 (path!("dir/two.rs").to_string(), vec![8..12]),
6459 ]),
6460 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
6461 );
6462}
6463
6464#[gpui::test]
6465async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
6466 init_test(cx);
6467
6468 let search_query = "file";
6469
6470 let fs = FakeFs::new(cx.executor());
6471 fs.insert_tree(
6472 path!("/dir"),
6473 json!({
6474 "one.rs": r#"// Rust file one"#,
6475 "one.ts": r#"// TypeScript file one"#,
6476 "two.rs": r#"// Rust file two"#,
6477 "two.ts": r#"// TypeScript file two"#,
6478 }),
6479 )
6480 .await;
6481 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6482
6483 assert_eq!(
6484 search(
6485 &project,
6486 SearchQuery::text(
6487 search_query,
6488 false,
6489 true,
6490 false,
6491 Default::default(),
6492 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6493 false,
6494 None,
6495 )
6496 .unwrap(),
6497 cx
6498 )
6499 .await
6500 .unwrap(),
6501 HashMap::from_iter([
6502 (path!("dir/one.rs").to_string(), vec![8..12]),
6503 (path!("dir/one.ts").to_string(), vec![14..18]),
6504 (path!("dir/two.rs").to_string(), vec![8..12]),
6505 (path!("dir/two.ts").to_string(), vec![14..18]),
6506 ]),
6507 "If no exclusions match, all files should be returned"
6508 );
6509
6510 assert_eq!(
6511 search(
6512 &project,
6513 SearchQuery::text(
6514 search_query,
6515 false,
6516 true,
6517 false,
6518 Default::default(),
6519 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6520 false,
6521 None,
6522 )
6523 .unwrap(),
6524 cx
6525 )
6526 .await
6527 .unwrap(),
6528 HashMap::from_iter([
6529 (path!("dir/one.ts").to_string(), vec![14..18]),
6530 (path!("dir/two.ts").to_string(), vec![14..18]),
6531 ]),
6532 "Rust exclusion search should give only TypeScript files"
6533 );
6534
6535 assert_eq!(
6536 search(
6537 &project,
6538 SearchQuery::text(
6539 search_query,
6540 false,
6541 true,
6542 false,
6543 Default::default(),
6544 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6545 .unwrap(),
6546 false,
6547 None,
6548 )
6549 .unwrap(),
6550 cx
6551 )
6552 .await
6553 .unwrap(),
6554 HashMap::from_iter([
6555 (path!("dir/one.rs").to_string(), vec![8..12]),
6556 (path!("dir/two.rs").to_string(), vec![8..12]),
6557 ]),
6558 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6559 );
6560
6561 assert!(
6562 search(
6563 &project,
6564 SearchQuery::text(
6565 search_query,
6566 false,
6567 true,
6568 false,
6569 Default::default(),
6570 PathMatcher::new(
6571 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6572 PathStyle::local(),
6573 )
6574 .unwrap(),
6575 false,
6576 None,
6577 )
6578 .unwrap(),
6579 cx
6580 )
6581 .await
6582 .unwrap()
6583 .is_empty(),
6584 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6585 );
6586}
6587
#[gpui::test]
async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
    // Same exclusion-filter scenarios as `test_search_with_exclusions`, but
    // with an additional unsaved, untitled buffer (whose text also contains
    // the query) open in the project; it must never leak into the results.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // Untitled buffer containing a match for the query; it has no path, so
    // the expected result maps below contain only the on-disk files.
    let _buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("file", None, false, cx)
    });

    // An exclusion glob that matches nothing leaves all on-disk files in.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Excluding "*.rs" leaves only the TypeScript files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A non-matching glob alongside a matching one has no extra effect.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding every extension present yields no results at all — the
    // untitled buffer does not surface either.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
6715
6716#[gpui::test]
6717async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6718 init_test(cx);
6719
6720 let search_query = "file";
6721
6722 let fs = FakeFs::new(cx.executor());
6723 fs.insert_tree(
6724 path!("/dir"),
6725 json!({
6726 "one.rs": r#"// Rust file one"#,
6727 "one.ts": r#"// TypeScript file one"#,
6728 "two.rs": r#"// Rust file two"#,
6729 "two.ts": r#"// TypeScript file two"#,
6730 }),
6731 )
6732 .await;
6733 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6734 assert!(
6735 search(
6736 &project,
6737 SearchQuery::text(
6738 search_query,
6739 false,
6740 true,
6741 false,
6742 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6743 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6744 false,
6745 None,
6746 )
6747 .unwrap(),
6748 cx
6749 )
6750 .await
6751 .unwrap()
6752 .is_empty(),
6753 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6754 );
6755
6756 assert!(
6757 search(
6758 &project,
6759 SearchQuery::text(
6760 search_query,
6761 false,
6762 true,
6763 false,
6764 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6765 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6766 false,
6767 None,
6768 )
6769 .unwrap(),
6770 cx
6771 )
6772 .await
6773 .unwrap()
6774 .is_empty(),
6775 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6776 );
6777
6778 assert!(
6779 search(
6780 &project,
6781 SearchQuery::text(
6782 search_query,
6783 false,
6784 true,
6785 false,
6786 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6787 .unwrap(),
6788 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6789 .unwrap(),
6790 false,
6791 None,
6792 )
6793 .unwrap(),
6794 cx
6795 )
6796 .await
6797 .unwrap()
6798 .is_empty(),
6799 "Non-matching inclusions and exclusions should not change that."
6800 );
6801
6802 assert_eq!(
6803 search(
6804 &project,
6805 SearchQuery::text(
6806 search_query,
6807 false,
6808 true,
6809 false,
6810 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6811 .unwrap(),
6812 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6813 .unwrap(),
6814 false,
6815 None,
6816 )
6817 .unwrap(),
6818 cx
6819 )
6820 .await
6821 .unwrap(),
6822 HashMap::from_iter([
6823 (path!("dir/one.ts").to_string(), vec![14..18]),
6824 (path!("dir/two.ts").to_string(), vec![14..18]),
6825 ]),
6826 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6827 );
6828}
6829
6830#[gpui::test]
6831async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
6832 init_test(cx);
6833
6834 let fs = FakeFs::new(cx.executor());
6835 fs.insert_tree(
6836 path!("/worktree-a"),
6837 json!({
6838 "haystack.rs": r#"// NEEDLE"#,
6839 "haystack.ts": r#"// NEEDLE"#,
6840 }),
6841 )
6842 .await;
6843 fs.insert_tree(
6844 path!("/worktree-b"),
6845 json!({
6846 "haystack.rs": r#"// NEEDLE"#,
6847 "haystack.ts": r#"// NEEDLE"#,
6848 }),
6849 )
6850 .await;
6851
6852 let path_style = PathStyle::local();
6853 let project = Project::test(
6854 fs.clone(),
6855 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
6856 cx,
6857 )
6858 .await;
6859
6860 assert_eq!(
6861 search(
6862 &project,
6863 SearchQuery::text(
6864 "NEEDLE",
6865 false,
6866 true,
6867 false,
6868 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
6869 Default::default(),
6870 true,
6871 None,
6872 )
6873 .unwrap(),
6874 cx
6875 )
6876 .await
6877 .unwrap(),
6878 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
6879 "should only return results from included worktree"
6880 );
6881 assert_eq!(
6882 search(
6883 &project,
6884 SearchQuery::text(
6885 "NEEDLE",
6886 false,
6887 true,
6888 false,
6889 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
6890 Default::default(),
6891 true,
6892 None,
6893 )
6894 .unwrap(),
6895 cx
6896 )
6897 .await
6898 .unwrap(),
6899 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
6900 "should only return results from included worktree"
6901 );
6902
6903 assert_eq!(
6904 search(
6905 &project,
6906 SearchQuery::text(
6907 "NEEDLE",
6908 false,
6909 true,
6910 false,
6911 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
6912 Default::default(),
6913 false,
6914 None,
6915 )
6916 .unwrap(),
6917 cx
6918 )
6919 .await
6920 .unwrap(),
6921 HashMap::from_iter([
6922 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
6923 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
6924 ]),
6925 "should return results from both worktrees"
6926 );
6927}
6928
6929#[gpui::test]
6930async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
6931 init_test(cx);
6932
6933 let fs = FakeFs::new(cx.background_executor.clone());
6934 fs.insert_tree(
6935 path!("/dir"),
6936 json!({
6937 ".git": {},
6938 ".gitignore": "**/target\n/node_modules\n",
6939 "target": {
6940 "index.txt": "index_key:index_value"
6941 },
6942 "node_modules": {
6943 "eslint": {
6944 "index.ts": "const eslint_key = 'eslint value'",
6945 "package.json": r#"{ "some_key": "some value" }"#,
6946 },
6947 "prettier": {
6948 "index.ts": "const prettier_key = 'prettier value'",
6949 "package.json": r#"{ "other_key": "other value" }"#,
6950 },
6951 },
6952 "package.json": r#"{ "main_key": "main value" }"#,
6953 }),
6954 )
6955 .await;
6956 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6957
6958 let query = "key";
6959 assert_eq!(
6960 search(
6961 &project,
6962 SearchQuery::text(
6963 query,
6964 false,
6965 false,
6966 false,
6967 Default::default(),
6968 Default::default(),
6969 false,
6970 None,
6971 )
6972 .unwrap(),
6973 cx
6974 )
6975 .await
6976 .unwrap(),
6977 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
6978 "Only one non-ignored file should have the query"
6979 );
6980
6981 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6982 let path_style = PathStyle::local();
6983 assert_eq!(
6984 search(
6985 &project,
6986 SearchQuery::text(
6987 query,
6988 false,
6989 false,
6990 true,
6991 Default::default(),
6992 Default::default(),
6993 false,
6994 None,
6995 )
6996 .unwrap(),
6997 cx
6998 )
6999 .await
7000 .unwrap(),
7001 HashMap::from_iter([
7002 (path!("dir/package.json").to_string(), vec![8..11]),
7003 (path!("dir/target/index.txt").to_string(), vec![6..9]),
7004 (
7005 path!("dir/node_modules/prettier/package.json").to_string(),
7006 vec![9..12]
7007 ),
7008 (
7009 path!("dir/node_modules/prettier/index.ts").to_string(),
7010 vec![15..18]
7011 ),
7012 (
7013 path!("dir/node_modules/eslint/index.ts").to_string(),
7014 vec![13..16]
7015 ),
7016 (
7017 path!("dir/node_modules/eslint/package.json").to_string(),
7018 vec![8..11]
7019 ),
7020 ]),
7021 "Unrestricted search with ignored directories should find every file with the query"
7022 );
7023
7024 let files_to_include =
7025 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
7026 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
7027 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7028 assert_eq!(
7029 search(
7030 &project,
7031 SearchQuery::text(
7032 query,
7033 false,
7034 false,
7035 true,
7036 files_to_include,
7037 files_to_exclude,
7038 false,
7039 None,
7040 )
7041 .unwrap(),
7042 cx
7043 )
7044 .await
7045 .unwrap(),
7046 HashMap::from_iter([(
7047 path!("dir/node_modules/prettier/package.json").to_string(),
7048 vec![9..12]
7049 )]),
7050 "With search including ignored prettier directory and excluding TS files, only one file should be found"
7051 );
7052}
7053
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // A case-sensitive Unicode search remains a plain text query.
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    // Match ranges are byte offsets: "привет" is 6 Cyrillic chars = 12 UTF-8
    // bytes, and the leading "// " accounts for the offset of 3.
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // A case-insensitive Unicode search is implemented as a regex query.
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    // Now both uppercase "ПРИВЕТ" and lowercase "привет" occurrences match.
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Although the case-insensitive query becomes a regex internally, "." in
    // the user's query is matched literally: only two.rs ("// ПРИВЕТ.")
    // matches — one.rs ("ПРИВЕТ? …") does not.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7136
7137#[gpui::test]
7138async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7139 init_test(cx);
7140
7141 let fs = FakeFs::new(cx.executor());
7142 fs.insert_tree(
7143 "/one/two",
7144 json!({
7145 "three": {
7146 "a.txt": "",
7147 "four": {}
7148 },
7149 "c.rs": ""
7150 }),
7151 )
7152 .await;
7153
7154 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7155 project
7156 .update(cx, |project, cx| {
7157 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7158 project.create_entry((id, rel_path("b..")), true, cx)
7159 })
7160 .await
7161 .unwrap()
7162 .into_included()
7163 .unwrap();
7164
7165 assert_eq!(
7166 fs.paths(true),
7167 vec![
7168 PathBuf::from(path!("/")),
7169 PathBuf::from(path!("/one")),
7170 PathBuf::from(path!("/one/two")),
7171 PathBuf::from(path!("/one/two/c.rs")),
7172 PathBuf::from(path!("/one/two/three")),
7173 PathBuf::from(path!("/one/two/three/a.txt")),
7174 PathBuf::from(path!("/one/two/three/b..")),
7175 PathBuf::from(path!("/one/two/three/four")),
7176 ]
7177 );
7178}
7179
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A single tsx buffer served by four fake language servers: two that
    // answer hover requests with content, one that answers `None`, and one
    // that does not advertise the hover capability at all.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Registered in the same order as `language_server_names`; only the last
    // adapter omits `hover_provider`.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Collect one request-handler stream per server that is expected to
    // receive a hover request, keyed by server name.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two respond with real hover content containing their name.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // Receives the request but has nothing to show.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // Must never be queried, since it lacks the hover capability.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Block until every capable server has actually received its request
    // before collecting the merged hover results.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two content-producing servers contribute to the merged result.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
7334
7335#[gpui::test]
7336async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
7337 init_test(cx);
7338
7339 let fs = FakeFs::new(cx.executor());
7340 fs.insert_tree(
7341 path!("/dir"),
7342 json!({
7343 "a.ts": "a",
7344 }),
7345 )
7346 .await;
7347
7348 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7349
7350 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7351 language_registry.add(typescript_lang());
7352 let mut fake_language_servers = language_registry.register_fake_lsp(
7353 "TypeScript",
7354 FakeLspAdapter {
7355 capabilities: lsp::ServerCapabilities {
7356 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7357 ..lsp::ServerCapabilities::default()
7358 },
7359 ..FakeLspAdapter::default()
7360 },
7361 );
7362
7363 let (buffer, _handle) = project
7364 .update(cx, |p, cx| {
7365 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7366 })
7367 .await
7368 .unwrap();
7369 cx.executor().run_until_parked();
7370
7371 let fake_server = fake_language_servers
7372 .next()
7373 .await
7374 .expect("failed to get the language server");
7375
7376 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7377 move |_, _| async move {
7378 Ok(Some(lsp::Hover {
7379 contents: lsp::HoverContents::Array(vec![
7380 lsp::MarkedString::String("".to_string()),
7381 lsp::MarkedString::String(" ".to_string()),
7382 lsp::MarkedString::String("\n\n\n".to_string()),
7383 ]),
7384 range: None,
7385 }))
7386 },
7387 );
7388
7389 let hover_task = project.update(cx, |project, cx| {
7390 project.hover(&buffer, Point::new(0, 0), cx)
7391 });
7392 let () = request_handled
7393 .next()
7394 .await
7395 .expect("All hover requests should have been triggered");
7396 assert_eq!(
7397 Vec::<String>::new(),
7398 hover_task
7399 .await
7400 .into_iter()
7401 .flatten()
7402 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7403 .sorted()
7404 .collect::<Vec<_>>(),
7405 "Empty hover parts should be ignored"
7406 );
7407}
7408
7409#[gpui::test]
7410async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
7411 init_test(cx);
7412
7413 let fs = FakeFs::new(cx.executor());
7414 fs.insert_tree(
7415 path!("/dir"),
7416 json!({
7417 "a.ts": "a",
7418 }),
7419 )
7420 .await;
7421
7422 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7423
7424 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7425 language_registry.add(typescript_lang());
7426 let mut fake_language_servers = language_registry.register_fake_lsp(
7427 "TypeScript",
7428 FakeLspAdapter {
7429 capabilities: lsp::ServerCapabilities {
7430 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7431 ..lsp::ServerCapabilities::default()
7432 },
7433 ..FakeLspAdapter::default()
7434 },
7435 );
7436
7437 let (buffer, _handle) = project
7438 .update(cx, |p, cx| {
7439 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7440 })
7441 .await
7442 .unwrap();
7443 cx.executor().run_until_parked();
7444
7445 let fake_server = fake_language_servers
7446 .next()
7447 .await
7448 .expect("failed to get the language server");
7449
7450 let mut request_handled = fake_server
7451 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
7452 Ok(Some(vec![
7453 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7454 title: "organize imports".to_string(),
7455 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
7456 ..lsp::CodeAction::default()
7457 }),
7458 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7459 title: "fix code".to_string(),
7460 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
7461 ..lsp::CodeAction::default()
7462 }),
7463 ]))
7464 });
7465
7466 let code_actions_task = project.update(cx, |project, cx| {
7467 project.code_actions(
7468 &buffer,
7469 0..buffer.read(cx).len(),
7470 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
7471 cx,
7472 )
7473 });
7474
7475 let () = request_handled
7476 .next()
7477 .await
7478 .expect("The code action request should have been triggered");
7479
7480 let code_actions = code_actions_task.await.unwrap().unwrap();
7481 assert_eq!(code_actions.len(), 1);
7482 assert_eq!(
7483 code_actions[0].lsp_action.action_kind(),
7484 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
7485 );
7486}
7487
7488#[gpui::test]
7489async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
7490 init_test(cx);
7491
7492 let fs = FakeFs::new(cx.executor());
7493 fs.insert_tree(
7494 path!("/dir"),
7495 json!({
7496 "a.tsx": "a",
7497 }),
7498 )
7499 .await;
7500
7501 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7502
7503 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7504 language_registry.add(tsx_lang());
7505 let language_server_names = [
7506 "TypeScriptServer",
7507 "TailwindServer",
7508 "ESLintServer",
7509 "NoActionsCapabilitiesServer",
7510 ];
7511
7512 let mut language_server_rxs = [
7513 language_registry.register_fake_lsp(
7514 "tsx",
7515 FakeLspAdapter {
7516 name: language_server_names[0],
7517 capabilities: lsp::ServerCapabilities {
7518 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7519 ..lsp::ServerCapabilities::default()
7520 },
7521 ..FakeLspAdapter::default()
7522 },
7523 ),
7524 language_registry.register_fake_lsp(
7525 "tsx",
7526 FakeLspAdapter {
7527 name: language_server_names[1],
7528 capabilities: lsp::ServerCapabilities {
7529 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7530 ..lsp::ServerCapabilities::default()
7531 },
7532 ..FakeLspAdapter::default()
7533 },
7534 ),
7535 language_registry.register_fake_lsp(
7536 "tsx",
7537 FakeLspAdapter {
7538 name: language_server_names[2],
7539 capabilities: lsp::ServerCapabilities {
7540 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7541 ..lsp::ServerCapabilities::default()
7542 },
7543 ..FakeLspAdapter::default()
7544 },
7545 ),
7546 language_registry.register_fake_lsp(
7547 "tsx",
7548 FakeLspAdapter {
7549 name: language_server_names[3],
7550 capabilities: lsp::ServerCapabilities {
7551 code_action_provider: None,
7552 ..lsp::ServerCapabilities::default()
7553 },
7554 ..FakeLspAdapter::default()
7555 },
7556 ),
7557 ];
7558
7559 let (buffer, _handle) = project
7560 .update(cx, |p, cx| {
7561 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7562 })
7563 .await
7564 .unwrap();
7565 cx.executor().run_until_parked();
7566
7567 let mut servers_with_actions_requests = HashMap::default();
7568 for i in 0..language_server_names.len() {
7569 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
7570 panic!(
7571 "Failed to get language server #{i} with name {}",
7572 &language_server_names[i]
7573 )
7574 });
7575 let new_server_name = new_server.server.name();
7576
7577 assert!(
7578 !servers_with_actions_requests.contains_key(&new_server_name),
7579 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7580 );
7581 match new_server_name.0.as_ref() {
7582 "TailwindServer" | "TypeScriptServer" => {
7583 servers_with_actions_requests.insert(
7584 new_server_name.clone(),
7585 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7586 move |_, _| {
7587 let name = new_server_name.clone();
7588 async move {
7589 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
7590 lsp::CodeAction {
7591 title: format!("{name} code action"),
7592 ..lsp::CodeAction::default()
7593 },
7594 )]))
7595 }
7596 },
7597 ),
7598 );
7599 }
7600 "ESLintServer" => {
7601 servers_with_actions_requests.insert(
7602 new_server_name,
7603 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7604 |_, _| async move { Ok(None) },
7605 ),
7606 );
7607 }
7608 "NoActionsCapabilitiesServer" => {
7609 let _never_handled = new_server
7610 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7611 panic!(
7612 "Should not call for code actions server with no corresponding capabilities"
7613 )
7614 });
7615 }
7616 unexpected => panic!("Unexpected server name: {unexpected}"),
7617 }
7618 }
7619
7620 let code_actions_task = project.update(cx, |project, cx| {
7621 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
7622 });
7623
7624 // cx.run_until_parked();
7625 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
7626 |mut code_actions_request| async move {
7627 code_actions_request
7628 .next()
7629 .await
7630 .expect("All code actions requests should have been triggered")
7631 },
7632 ))
7633 .await;
7634 assert_eq!(
7635 vec!["TailwindServer code action", "TypeScriptServer code action"],
7636 code_actions_task
7637 .await
7638 .unwrap()
7639 .unwrap()
7640 .into_iter()
7641 .map(|code_action| code_action.lsp_action.title().to_owned())
7642 .sorted()
7643 .collect::<Vec<_>>(),
7644 "Should receive code actions responses from all related servers with hover capabilities"
7645 );
7646}
7647
7648#[gpui::test]
7649async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
7650 init_test(cx);
7651
7652 let fs = FakeFs::new(cx.executor());
7653 fs.insert_tree(
7654 "/dir",
7655 json!({
7656 "a.rs": "let a = 1;",
7657 "b.rs": "let b = 2;",
7658 "c.rs": "let c = 2;",
7659 }),
7660 )
7661 .await;
7662
7663 let project = Project::test(
7664 fs,
7665 [
7666 "/dir/a.rs".as_ref(),
7667 "/dir/b.rs".as_ref(),
7668 "/dir/c.rs".as_ref(),
7669 ],
7670 cx,
7671 )
7672 .await;
7673
7674 // check the initial state and get the worktrees
7675 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7676 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7677 assert_eq!(worktrees.len(), 3);
7678
7679 let worktree_a = worktrees[0].read(cx);
7680 let worktree_b = worktrees[1].read(cx);
7681 let worktree_c = worktrees[2].read(cx);
7682
7683 // check they start in the right order
7684 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7685 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7686 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7687
7688 (
7689 worktrees[0].clone(),
7690 worktrees[1].clone(),
7691 worktrees[2].clone(),
7692 )
7693 });
7694
7695 // move first worktree to after the second
7696 // [a, b, c] -> [b, a, c]
7697 project
7698 .update(cx, |project, cx| {
7699 let first = worktree_a.read(cx);
7700 let second = worktree_b.read(cx);
7701 project.move_worktree(first.id(), second.id(), cx)
7702 })
7703 .expect("moving first after second");
7704
7705 // check the state after moving
7706 project.update(cx, |project, cx| {
7707 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7708 assert_eq!(worktrees.len(), 3);
7709
7710 let first = worktrees[0].read(cx);
7711 let second = worktrees[1].read(cx);
7712 let third = worktrees[2].read(cx);
7713
7714 // check they are now in the right order
7715 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7716 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7717 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7718 });
7719
7720 // move the second worktree to before the first
7721 // [b, a, c] -> [a, b, c]
7722 project
7723 .update(cx, |project, cx| {
7724 let second = worktree_a.read(cx);
7725 let first = worktree_b.read(cx);
7726 project.move_worktree(first.id(), second.id(), cx)
7727 })
7728 .expect("moving second before first");
7729
7730 // check the state after moving
7731 project.update(cx, |project, cx| {
7732 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7733 assert_eq!(worktrees.len(), 3);
7734
7735 let first = worktrees[0].read(cx);
7736 let second = worktrees[1].read(cx);
7737 let third = worktrees[2].read(cx);
7738
7739 // check they are now in the right order
7740 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7741 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7742 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7743 });
7744
7745 // move the second worktree to after the third
7746 // [a, b, c] -> [a, c, b]
7747 project
7748 .update(cx, |project, cx| {
7749 let second = worktree_b.read(cx);
7750 let third = worktree_c.read(cx);
7751 project.move_worktree(second.id(), third.id(), cx)
7752 })
7753 .expect("moving second after third");
7754
7755 // check the state after moving
7756 project.update(cx, |project, cx| {
7757 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7758 assert_eq!(worktrees.len(), 3);
7759
7760 let first = worktrees[0].read(cx);
7761 let second = worktrees[1].read(cx);
7762 let third = worktrees[2].read(cx);
7763
7764 // check they are now in the right order
7765 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7766 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7767 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7768 });
7769
7770 // move the third worktree to before the second
7771 // [a, c, b] -> [a, b, c]
7772 project
7773 .update(cx, |project, cx| {
7774 let third = worktree_c.read(cx);
7775 let second = worktree_b.read(cx);
7776 project.move_worktree(third.id(), second.id(), cx)
7777 })
7778 .expect("moving third before second");
7779
7780 // check the state after moving
7781 project.update(cx, |project, cx| {
7782 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7783 assert_eq!(worktrees.len(), 3);
7784
7785 let first = worktrees[0].read(cx);
7786 let second = worktrees[1].read(cx);
7787 let third = worktrees[2].read(cx);
7788
7789 // check they are now in the right order
7790 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7791 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7792 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7793 });
7794
7795 // move the first worktree to after the third
7796 // [a, b, c] -> [b, c, a]
7797 project
7798 .update(cx, |project, cx| {
7799 let first = worktree_a.read(cx);
7800 let third = worktree_c.read(cx);
7801 project.move_worktree(first.id(), third.id(), cx)
7802 })
7803 .expect("moving first after third");
7804
7805 // check the state after moving
7806 project.update(cx, |project, cx| {
7807 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7808 assert_eq!(worktrees.len(), 3);
7809
7810 let first = worktrees[0].read(cx);
7811 let second = worktrees[1].read(cx);
7812 let third = worktrees[2].read(cx);
7813
7814 // check they are now in the right order
7815 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7816 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7817 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7818 });
7819
7820 // move the third worktree to before the first
7821 // [b, c, a] -> [a, b, c]
7822 project
7823 .update(cx, |project, cx| {
7824 let third = worktree_a.read(cx);
7825 let first = worktree_b.read(cx);
7826 project.move_worktree(third.id(), first.id(), cx)
7827 })
7828 .expect("moving third before first");
7829
7830 // check the state after moving
7831 project.update(cx, |project, cx| {
7832 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7833 assert_eq!(worktrees.len(), 3);
7834
7835 let first = worktrees[0].read(cx);
7836 let second = worktrees[1].read(cx);
7837 let third = worktrees[2].read(cx);
7838
7839 // check they are now in the right order
7840 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7841 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7842 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7843 });
7844}
7845
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index (staged) version: no comment line, prints "hello world".
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: adds a comment line and changes the greeting.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    // NOTE(review): bare "/dir" literals here (not `path!`) differ from the
    // `path!`-based tests above — confirm this is intentional.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    // Buffer vs. index: one added comment line (row 0) and one modified
    // println (row 2).
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Update the index: the comment is now staged and the println is gone
    // from the index entirely.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    cx.run_until_parked();
    // Only the println line remains unstaged, now reported as an addition.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff
                .snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
7939
// Verifies that a buffer's *uncommitted* diff (working copy vs. HEAD) tracks
// changes to both HEAD and the index, including for a file that has been
// deleted from the working copy.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD, index, and working-copy versions of the file, each one step apart.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index but not in the working copy.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text picks up the buffer's language (for highlighting).
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                // The comment exists only in the working copy => unstaged.
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                // The println! change is already in the index => staged.
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a deletion that has not yet been staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    // (by writing an index that omits `deletion.rs` entirely).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk is now staged: no secondary (unstaged) hunk remains.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8123
// Exercises staging individual hunks of an uncommitted diff: the optimistic
// "pending" secondary statuses, the events the diff emits, and recovery when
// the write to the git index fails.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index start identical; the working copy deletes "zero" and
    // upcases "two" and "four", producing three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                // Pending until the index write completes.
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8473
// Verifies that staging several hunks stays consistent when filesystem events
// for earlier index writes arrive late: events are paused and flushed manually
// so later staging operations interleave with earlier events.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Working copy deletes "zero" and upcases "two" and "four" => three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8667
// Randomized test: repeatedly stage/unstage random hunks with random delays
// interleaved, then verify that once the executor settles, every hunk's
// secondary (staged) status matches the locally tracked expectation.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line of the buffer is modified relative to HEAD/index.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // 30 lines with every fifth one modified yields 6 hunks.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk's staged state, recording the expected pending
        // status in the local copy of the hunks.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Let index writes race with the subsequent operations.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // After everything settles, each pending status resolves to its final state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8787
8788#[gpui::test]
8789async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
8790 init_test(cx);
8791
8792 let committed_contents = r#"
8793 fn main() {
8794 println!("hello from HEAD");
8795 }
8796 "#
8797 .unindent();
8798 let file_contents = r#"
8799 fn main() {
8800 println!("hello from the working copy");
8801 }
8802 "#
8803 .unindent();
8804
8805 let fs = FakeFs::new(cx.background_executor.clone());
8806 fs.insert_tree(
8807 "/dir",
8808 json!({
8809 ".git": {},
8810 "src": {
8811 "main.rs": file_contents,
8812 }
8813 }),
8814 )
8815 .await;
8816
8817 fs.set_head_for_repo(
8818 Path::new("/dir/.git"),
8819 &[("src/main.rs", committed_contents.clone())],
8820 "deadbeef",
8821 );
8822 fs.set_index_for_repo(
8823 Path::new("/dir/.git"),
8824 &[("src/main.rs", committed_contents.clone())],
8825 );
8826
8827 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8828
8829 let buffer = project
8830 .update(cx, |project, cx| {
8831 project.open_local_buffer("/dir/src/main.rs", cx)
8832 })
8833 .await
8834 .unwrap();
8835 let uncommitted_diff = project
8836 .update(cx, |project, cx| {
8837 project.open_uncommitted_diff(buffer.clone(), cx)
8838 })
8839 .await
8840 .unwrap();
8841
8842 cx.run_until_parked();
8843 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8844 let snapshot = buffer.read(cx).snapshot();
8845 assert_hunks(
8846 uncommitted_diff.snapshot(cx).hunks(&snapshot),
8847 &snapshot,
8848 &uncommitted_diff.base_text_string(cx).unwrap(),
8849 &[(
8850 1..2,
8851 " println!(\"hello from HEAD\");\n",
8852 " println!(\"hello from the working copy\");\n",
8853 DiffHunkStatus {
8854 kind: DiffHunkStatusKind::Modified,
8855 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8856 },
8857 )],
8858 );
8859 });
8860}
8861
// TODO: Should we test this on Windows also?
// Verifies that staging a hunk does not clobber the executable bit of the
// file's entry in the git index (regression-style check against a real repo).
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Uses a real on-disk git repository and subprocesses, so allow blocking.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` with the executable bit (0o755) set, then modify it in the
    // working copy so there is a hunk to stage.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk, which rewrites the file's entry in the index.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // Ask git itself whether the staged entry kept its original mode.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check via the raw index listing (mode bits appear as 100755).
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
8946
// Verifies repository lookup for project paths, including nested repositories
// (a repo inside another repo's `deps` directory) and the effect of removing
// a repository's `.git` directory.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // Each case pairs a project-relative path with the expected
        // (repository work dir, repo-relative path); `None` means the path
        // belongs to no repository. `a.txt` must resolve to the *inner* repo.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Deleting the outer `.git` directory removes that repository, so its
    // files should no longer resolve to any repo.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
9036
9037#[gpui::test]
9038async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
9039 init_test(cx);
9040 let fs = FakeFs::new(cx.background_executor.clone());
9041 let home = paths::home_dir();
9042 fs.insert_tree(
9043 home,
9044 json!({
9045 ".git": {},
9046 "project": {
9047 "a.txt": "A"
9048 },
9049 }),
9050 )
9051 .await;
9052
9053 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
9054 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9055 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9056
9057 project
9058 .update(cx, |project, cx| project.git_scans_complete(cx))
9059 .await;
9060 tree.flush_fs_events(cx).await;
9061
9062 project.read_with(cx, |project, cx| {
9063 let containing = project
9064 .git_store()
9065 .read(cx)
9066 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
9067 assert!(containing.is_none());
9068 });
9069
9070 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
9071 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9072 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9073 project
9074 .update(cx, |project, cx| project.git_scans_complete(cx))
9075 .await;
9076 tree.flush_fs_events(cx).await;
9077
9078 project.read_with(cx, |project, cx| {
9079 let containing = project
9080 .git_store()
9081 .read(cx)
9082 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
9083 assert_eq!(
9084 containing
9085 .unwrap()
9086 .0
9087 .read(cx)
9088 .work_directory_abs_path
9089 .as_ref(),
9090 home,
9091 );
9092 });
9093}
9094
/// Verifies, against a real on-disk git repository, that file statuses are
/// computed correctly at startup and stay current as files are modified,
/// staged, committed, and deleted.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git and file-system operations below perform blocking I/O.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce a worktree deletion (d.txt) and a worktree modification (a.txt).
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a tracked, previously-unchanged file and confirm the new status is observed.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Stage and commit the outstanding changes, which should clear those statuses.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
9224
/// Checks postprocessing of raw git statuses: a nested git repository (`sub`)
/// is excluded from the parent repository's status list, and index + worktree
/// changes for the same path are combined into a single `DA` entry.
///
/// NOTE(review): currently `#[ignore]`d — reason not recorded here.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git and file-system operations below perform blocking I/O.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer repository (not the nested one under `sub`).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
9288
9289#[track_caller]
9290/// We merge lhs into rhs.
9291fn merge_pending_ops_snapshots(
9292 source: Vec<pending_op::PendingOps>,
9293 mut target: Vec<pending_op::PendingOps>,
9294) -> Vec<pending_op::PendingOps> {
9295 for s_ops in source {
9296 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9297 if ops.repo_path == s_ops.repo_path {
9298 Some(idx)
9299 } else {
9300 None
9301 }
9302 }) {
9303 let t_ops = &mut target[idx];
9304 for s_op in s_ops.ops {
9305 if let Some(op_idx) = t_ops
9306 .ops
9307 .iter()
9308 .zip(0..)
9309 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9310 {
9311 let t_op = &mut t_ops.ops[op_idx];
9312 match (s_op.job_status, t_op.job_status) {
9313 (pending_op::JobStatus::Running, _) => {}
9314 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9315 (s_st, t_st) if s_st == t_st => {}
9316 _ => unreachable!(),
9317 }
9318 } else {
9319 t_ops.ops.push(s_op);
9320 }
9321 }
9322 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9323 } else {
9324 target.push(s_ops);
9325 }
9326 }
9327 target
9328}
9329
/// Exercises the pending-ops bookkeeping for repeated stage/unstage of a
/// single untracked file: each request gets a sequential op id that is
/// `Running` while the task is in flight and `Finished` once it resolves,
/// and the accumulated event stream contains every op exactly once.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store so the
    // full op history can be checked at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Op ids are expected to be assigned sequentially starting from 1.
    let mut id = 1u16;

    // Toggles the staged state of `path`, asserting the newest pending op is
    // `Running` while the task is in flight and `Finished` after it resolves.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // All five alternating stage/unstage ops must appear in order, finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The last operation was a stage, so the file ends up added in the index.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9490
/// Exercises superseding of pending ops: a second staging request for the
/// same path, issued while the first is still in flight, causes the first op
/// to end up `Skipped` and the second to run to `Finished`.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store so the
    // full op history can be checked at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Kick off a staging operation without awaiting its completion.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Issue a second staging request for the same path and wait for it.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // The first op is superseded (Skipped); only the second actually ran.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // Either way, the file ends up staged (added in the index).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9596
/// Exercises pending ops produced by `stage_all`/`unstage_all` across two
/// untracked files: each path ends up with exactly one Staged and one
/// Unstaged op, and the final cached statuses are back to Untracked.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store so the
    // full op history can be checked at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt explicitly, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt gets only two ops: stage_all apparently produced no new op for an
    // already-staged path (its op history is just ids 1 and 2).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both files are back to their original Untracked state.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
9725
9726#[gpui::test]
9727async fn test_repository_subfolder_git_status(
9728 executor: gpui::BackgroundExecutor,
9729 cx: &mut gpui::TestAppContext,
9730) {
9731 init_test(cx);
9732
9733 let fs = FakeFs::new(executor);
9734 fs.insert_tree(
9735 path!("/root"),
9736 json!({
9737 "my-repo": {
9738 ".git": {},
9739 "a.txt": "a",
9740 "sub-folder-1": {
9741 "sub-folder-2": {
9742 "c.txt": "cc",
9743 "d": {
9744 "e.txt": "eee"
9745 }
9746 },
9747 }
9748 },
9749 }),
9750 )
9751 .await;
9752
9753 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
9754 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
9755
9756 fs.set_status_for_repo(
9757 path!("/root/my-repo/.git").as_ref(),
9758 &[(E_TXT, FileStatus::Untracked)],
9759 );
9760
9761 let project = Project::test(
9762 fs.clone(),
9763 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
9764 cx,
9765 )
9766 .await;
9767
9768 project
9769 .update(cx, |project, cx| project.git_scans_complete(cx))
9770 .await;
9771 cx.run_until_parked();
9772
9773 let repository = project.read_with(cx, |project, cx| {
9774 project.repositories(cx).values().next().unwrap().clone()
9775 });
9776
9777 // Ensure that the git status is loaded correctly
9778 repository.read_with(cx, |repository, _cx| {
9779 assert_eq!(
9780 repository.work_directory_abs_path,
9781 Path::new(path!("/root/my-repo")).into()
9782 );
9783
9784 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
9785 assert_eq!(
9786 repository
9787 .status_for_path(&repo_path(E_TXT))
9788 .unwrap()
9789 .status,
9790 FileStatus::Untracked
9791 );
9792 });
9793
9794 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
9795 project
9796 .update(cx, |project, cx| project.git_scans_complete(cx))
9797 .await;
9798 cx.run_until_parked();
9799
9800 repository.read_with(cx, |repository, _cx| {
9801 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
9802 assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
9803 });
9804}
9805
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
/// Verifies that a conflicted cherry-pick surfaces the conflicted path in
/// `merge_conflicts`, and that the conflict is cleared after the cherry-pick
/// is resolved and committed.
///
/// Currently compiled out entirely via `#[cfg(any())]` due to flakiness.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git and file-system operations below perform blocking I/O.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a conflicting change to a.txt on another branch, then cherry-pick
    // it onto main, where a.txt was changed differently.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git itself sees an in-progress, conflicted cherry-pick.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
9888
/// Verifies that rewriting `.gitignore` updates both the worktree's
/// ignored flags and the repository's per-file statuses: a newly-ignored
/// file loses its status, and a newly-unignored, staged file shows as Added.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // a.xml is committed and clean; b.txt is ignored by the initial rule.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Ignored-ness has swapped: a.xml is now ignored, b.txt shows as Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
9956
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// Verifies that renaming a repository's work directory on disk updates
/// `work_directory_abs_path` while preserving the per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git and file-system operations below perform blocking I/O.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // "a" is committed then modified; "b" is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository tracks the new location; statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10038
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of the Windows. See:
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// Broad end-to-end test of file-status tracking against a real git
/// repository: startup state, worktree modifications, commits, resets,
/// stashes, gitignore changes, and directory creation/rename.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git and file-system operations below perform blocking I/O.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files/directories and extend the ignore rules.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new nested directory containing an untracked file.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the untracked status must follow the file
    // to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10263
/// End-to-end check (real FS, real git repo) that file churn inside a
/// git-ignored directory (`project/target`, as produced by cargo/flycheck
/// runs) emits no repository-status updates, while add/remove of entries
/// directly inside an already-loaded ignored dir still surfaces as worktree
/// entry updates.
///
/// NOTE(review): `#[ignore]`d — depends on real FS event timing.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // RealFs watchers run on OS threads, so the deterministic executor must be
    // allowed to park while waiting on them.
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record every repository update and worktree entry change for the
    // assertions below.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel entry is test-harness noise, not a real change.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Loading a file inside the ignored dir forces those entries into the worktree.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![
            RepositoryEvent::StatusesChanged,
            RepositoryEvent::MergeHeadsChanged,
        ],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Emulate a flycheck-style burst entirely inside the ignored `target`
    // tree: create a nested dir, write a temp file into it, then remove the
    // whole dir, letting scans settle after each step.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
No updates for more nested directories should happen as those are ignored",
    );
}
10425
10426// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
10427// to different timings/ordering of events.
10428#[ignore]
10429#[gpui::test]
10430async fn test_odd_events_for_ignored_dirs(
10431 executor: BackgroundExecutor,
10432 cx: &mut gpui::TestAppContext,
10433) {
10434 init_test(cx);
10435 let fs = FakeFs::new(executor);
10436 fs.insert_tree(
10437 path!("/root"),
10438 json!({
10439 ".git": {},
10440 ".gitignore": "**/target/",
10441 "src": {
10442 "main.rs": "fn main() {}",
10443 },
10444 "target": {
10445 "debug": {
10446 "foo.txt": "foo",
10447 "deps": {}
10448 }
10449 }
10450 }),
10451 )
10452 .await;
10453 fs.set_head_and_index_for_repo(
10454 path!("/root/.git").as_ref(),
10455 &[
10456 (".gitignore", "**/target/".into()),
10457 ("src/main.rs", "fn main() {}".into()),
10458 ],
10459 );
10460
10461 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10462 let repository_updates = Arc::new(Mutex::new(Vec::new()));
10463 let project_events = Arc::new(Mutex::new(Vec::new()));
10464 project.update(cx, |project, cx| {
10465 let repository_updates = repository_updates.clone();
10466 cx.subscribe(project.git_store(), move |_, _, e, _| {
10467 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
10468 repository_updates.lock().push(e.clone());
10469 }
10470 })
10471 .detach();
10472 let project_events = project_events.clone();
10473 cx.subscribe_self(move |_, e, _| {
10474 if let Event::WorktreeUpdatedEntries(_, updates) = e {
10475 project_events.lock().extend(
10476 updates
10477 .iter()
10478 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
10479 .filter(|(path, _)| path != "fs-event-sentinel"),
10480 );
10481 }
10482 })
10483 .detach();
10484 });
10485
10486 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10487 tree.update(cx, |tree, cx| {
10488 tree.load_file(rel_path("target/debug/foo.txt"), cx)
10489 })
10490 .await
10491 .unwrap();
10492 tree.flush_fs_events(cx).await;
10493 project
10494 .update(cx, |project, cx| project.git_scans_complete(cx))
10495 .await;
10496 cx.run_until_parked();
10497 tree.update(cx, |tree, _| {
10498 assert_eq!(
10499 tree.entries(true, 0)
10500 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10501 .collect::<Vec<_>>(),
10502 vec![
10503 (rel_path(""), false),
10504 (rel_path(".gitignore"), false),
10505 (rel_path("src"), false),
10506 (rel_path("src/main.rs"), false),
10507 (rel_path("target"), true),
10508 (rel_path("target/debug"), true),
10509 (rel_path("target/debug/deps"), true),
10510 (rel_path("target/debug/foo.txt"), true),
10511 ]
10512 );
10513 });
10514
10515 assert_eq!(
10516 repository_updates.lock().drain(..).collect::<Vec<_>>(),
10517 vec![
10518 RepositoryEvent::MergeHeadsChanged,
10519 RepositoryEvent::BranchChanged,
10520 RepositoryEvent::StatusesChanged,
10521 RepositoryEvent::StatusesChanged,
10522 ],
10523 "Initial worktree scan should produce a repo update event"
10524 );
10525 assert_eq!(
10526 project_events.lock().drain(..).collect::<Vec<_>>(),
10527 vec![
10528 ("target".to_string(), PathChange::Loaded),
10529 ("target/debug".to_string(), PathChange::Loaded),
10530 ("target/debug/deps".to_string(), PathChange::Loaded),
10531 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
10532 ],
10533 "All non-ignored entries and all opened firs should be getting a project event",
10534 );
10535
10536 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
10537 // This may happen multiple times during a single flycheck, but once is enough for testing.
10538 fs.emit_fs_event("/root/target/debug/deps", None);
10539 tree.flush_fs_events(cx).await;
10540 project
10541 .update(cx, |project, cx| project.git_scans_complete(cx))
10542 .await;
10543 cx.executor().run_until_parked();
10544
10545 assert_eq!(
10546 repository_updates
10547 .lock()
10548 .iter()
10549 .cloned()
10550 .collect::<Vec<_>>(),
10551 Vec::new(),
10552 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
10553 );
10554 assert_eq!(
10555 project_events.lock().as_slice(),
10556 Vec::new(),
10557 "No further project events should happen, as only ignored dirs received FS events",
10558 );
10559}
10560
/// Ensures that adding an invisible (non-visible) worktree for a file that
/// lives inside an enclosing git repository does not register that enclosing
/// repository: only `dep1`'s repo should be reported before and after.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Only `dep1` is opened as a (visible) worktree root.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Open a single-file (invisible) worktree for `b.txt`, which lives inside
    // the outer `dir1` repository.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The outer `dir1` repository must still not appear.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
10622
/// Verifies git status and ignore flags across rescans: tracked files, files
/// ignored by an ancestor `.gitignore` outside the repo, and files inside an
/// ignored dir — both for the initial scan and after new files are created
/// and the index is updated.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so dotfiles like `.git` appear as entries.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the contents of the ignored dir to be scanned so its entries exist.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new tracked file, stage it, and also create new ignored files
    // (one via the ancestor .gitignore, one inside the ignored dir).
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // Newly staged file should show as Added in the index.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // `.git` itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10763
/// Verifies that linked git worktrees (`.git` file pointing at
/// `.git/worktrees/...`) and submodules (`.git` file pointing at
/// `.git/modules/...`) are each discovered as distinct repositories, and that
/// git-state changes in them refresh statuses for their own work directories.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three work directories (main repo, linked worktree, submodule)
    // should be discovered.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        // Barrier lets us wait for the repository's in-flight work to finish.
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // HEAD/index say "b" but the file on disk says "B" => modified in worktree.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
10919
/// Two sibling worktrees (`child1`, `child2`) share a single enclosing git
/// repository; the project must report that repository exactly once.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repo root as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository entry, not one per worktree.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
10966
/// When a buffer is saved under a new path (`save_buffer_as`), its open
/// unstaged/uncommitted diffs must re-base against the *new* file's staged and
/// committed contents (driven by the `BufferChangedFilePath` event).
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct committed/staged contents per file so we can tell which base
    // text a diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Dirty the buffer so there is something to diff.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11080
11081async fn search(
11082 project: &Entity<Project>,
11083 query: SearchQuery,
11084 cx: &mut gpui::TestAppContext,
11085) -> Result<HashMap<String, Vec<Range<usize>>>> {
11086 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11087 let mut results = HashMap::default();
11088 while let Ok(search_result) = search_rx.rx.recv().await {
11089 match search_result {
11090 SearchResult::Buffer { buffer, ranges } => {
11091 results.entry(buffer).or_insert(ranges);
11092 }
11093 SearchResult::LimitReached => {}
11094 }
11095 }
11096 Ok(results
11097 .into_iter()
11098 .map(|(buffer, ranges)| {
11099 buffer.update(cx, |buffer, cx| {
11100 let path = buffer
11101 .file()
11102 .unwrap()
11103 .full_path(cx)
11104 .to_string_lossy()
11105 .to_string();
11106 let ranges = ranges
11107 .into_iter()
11108 .map(|range| range.to_offset(buffer))
11109 .collect::<Vec<_>>();
11110 (path, ranges)
11111 })
11112 })
11113 .collect())
11114}
11115
/// Shared per-test setup: initializes test logging, installs a test
/// `SettingsStore` global, and initializes the release channel.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        // The settings store must be a global before any code under test
        // reads settings.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(semver::Version::new(0, 0, 0), cx);
    });
}
11125
11126fn json_lang() -> Arc<Language> {
11127 Arc::new(Language::new(
11128 LanguageConfig {
11129 name: "JSON".into(),
11130 matcher: LanguageMatcher {
11131 path_suffixes: vec!["json".to_string()],
11132 ..Default::default()
11133 },
11134 ..Default::default()
11135 },
11136 None,
11137 ))
11138}
11139
11140fn js_lang() -> Arc<Language> {
11141 Arc::new(Language::new(
11142 LanguageConfig {
11143 name: "JavaScript".into(),
11144 matcher: LanguageMatcher {
11145 path_suffixes: vec!["js".to_string()],
11146 ..Default::default()
11147 },
11148 ..Default::default()
11149 },
11150 None,
11151 ))
11152}
11153
/// A test Python language wired to a fake toolchain lister that reports a
/// "Python Venv" toolchain for every `.venv` directory it finds via `fs`.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // Reports one toolchain per `.venv` directory that exists in any
            // ancestor of the queried subroot path (checked via the fake fs).
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is unsupported in this fake; tests only use `list`.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation needed for the fake toolchain.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
11228
11229fn typescript_lang() -> Arc<Language> {
11230 Arc::new(Language::new(
11231 LanguageConfig {
11232 name: "TypeScript".into(),
11233 matcher: LanguageMatcher {
11234 path_suffixes: vec!["ts".to_string()],
11235 ..Default::default()
11236 },
11237 ..Default::default()
11238 },
11239 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
11240 ))
11241}
11242
11243fn tsx_lang() -> Arc<Language> {
11244 Arc::new(Language::new(
11245 LanguageConfig {
11246 name: "tsx".into(),
11247 matcher: LanguageMatcher {
11248 path_suffixes: vec!["tsx".to_string()],
11249 ..Default::default()
11250 },
11251 ..Default::default()
11252 },
11253 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
11254 ))
11255}
11256
/// Resolves the project's task inventory against `task_contexts`, returning
/// the previously-used tasks followed by the currently-available ones.
fn get_all_tasks(
    project: &Entity<Project>,
    task_contexts: Arc<TaskContexts>,
    cx: &mut App,
) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
    let new_tasks = project.update(cx, |project, cx| {
        project.task_store().update(cx, |task_store, cx| {
            task_store.task_inventory().unwrap().update(cx, |this, cx| {
                this.used_and_current_resolved_tasks(task_contexts, cx)
            })
        })
    });

    // Concatenate used tasks first, then current ones, preserving order.
    cx.background_spawn(async move {
        let (mut old, new) = new_tasks.await;
        old.extend(new);
        old
    })
}
11276
11277#[track_caller]
11278fn assert_entry_git_state(
11279 tree: &Worktree,
11280 repository: &Repository,
11281 path: &str,
11282 index_status: Option<StatusCode>,
11283 is_ignored: bool,
11284) {
11285 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
11286 let entry = tree
11287 .entry_for_path(&rel_path(path))
11288 .unwrap_or_else(|| panic!("entry {path} not found"));
11289 let status = repository
11290 .status_for_path(&repo_path(path))
11291 .map(|entry| entry.status);
11292 let expected = index_status.map(|index_status| {
11293 TrackedStatus {
11294 index_status,
11295 worktree_status: StatusCode::Unmodified,
11296 }
11297 .into()
11298 });
11299 assert_eq!(
11300 status, expected,
11301 "expected {path} to have git status: {expected:?}"
11302 );
11303 assert_eq!(
11304 entry.is_ignored, is_ignored,
11305 "expected {path} to have is_ignored: {is_ignored}"
11306 );
11307}
11308
11309#[track_caller]
11310fn git_init(path: &Path) -> git2::Repository {
11311 let mut init_opts = RepositoryInitOptions::new();
11312 init_opts.initial_head("main");
11313 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
11314}
11315
11316#[track_caller]
11317fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
11318 let path = path.as_ref();
11319 let mut index = repo.index().expect("Failed to get index");
11320 index.add_path(path).expect("Failed to add file");
11321 index.write().expect("Failed to write index");
11322}
11323
11324#[track_caller]
11325fn git_remove_index(path: &Path, repo: &git2::Repository) {
11326 let mut index = repo.index().expect("Failed to get index");
11327 index.remove_path(path).expect("Failed to add file");
11328 index.write().expect("Failed to write index");
11329}
11330
11331#[track_caller]
11332fn git_commit(msg: &'static str, repo: &git2::Repository) {
11333 use git2::Signature;
11334
11335 let signature = Signature::now("test", "test@zed.dev").unwrap();
11336 let oid = repo.index().unwrap().write_tree().unwrap();
11337 let tree = repo.find_tree(oid).unwrap();
11338 if let Ok(head) = repo.head() {
11339 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
11340
11341 let parent_commit = parent_obj.as_commit().unwrap();
11342
11343 repo.commit(
11344 Some("HEAD"),
11345 &signature,
11346 &signature,
11347 msg,
11348 &tree,
11349 &[parent_commit],
11350 )
11351 .expect("Failed to commit with parent");
11352 } else {
11353 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
11354 .expect("Failed to commit");
11355 }
11356}
11357
// Cherry-picks `commit` onto the current HEAD. Currently compiled out via
// `cfg(any())` (always-false); kept for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
11363
11364#[track_caller]
11365fn git_stash(repo: &mut git2::Repository) {
11366 use git2::Signature;
11367
11368 let signature = Signature::now("test", "test@zed.dev").unwrap();
11369 repo.stash_save(&signature, "N/A", None)
11370 .expect("Failed to stash");
11371}
11372
11373#[track_caller]
11374fn git_reset(offset: usize, repo: &git2::Repository) {
11375 let head = repo.head().expect("Couldn't get repo head");
11376 let object = head.peel(git2::ObjectType::Commit).unwrap();
11377 let commit = object.as_commit().unwrap();
11378 let new_head = commit
11379 .parents()
11380 .inspect(|parnet| {
11381 parnet.message();
11382 })
11383 .nth(offset)
11384 .expect("Not enough history");
11385 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
11386 .expect("Could not reset");
11387}
11388
/// Creates branch `name` pointing at the current HEAD commit. Currently
/// compiled out via `cfg(any())` (always-false); kept for tests that may
/// re-enable it.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-pasted panic message: this creates a branch, not a commit.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
11399
// Points HEAD at `name` (e.g. "refs/heads/feature") and checks out the
// corresponding tree. Currently compiled out via `cfg(any())` (always-false);
// kept for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
11406
/// Returns the repository's status entries keyed by path. Currently compiled
/// out via `cfg(any())` (always-false); kept for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
11416
/// Verifies that `Project::find_project_path` resolves absolute paths into
/// (worktree id, worktree-relative path) pairs across multiple worktrees,
/// including paths to files that don't exist yet (still inside a worktree)
/// and paths outside every worktree (which must resolve to `None`).
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two sibling directories opened as two separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root path and id so the assertions
    // below can be made against concrete values.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Existing file at a worktree root resolves to that worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested file resolves with its full worktree-relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // A file in the second worktree resolves to that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path inside a worktree still resolves even if no file exists
        // there yet (e.g. for creating new files).
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
11500
/// Verifies repository bookkeeping as worktrees are removed: removing a
/// worktree that lives *inside* a repo (`/root/b/script`) must not drop that
/// repo (it's still covered by the `/root/b` worktree), and removing the
/// worktree containing the active repository must fall over to the next
/// repository, ending with no active repository once all are gone.
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Note: /root/b/script is opened as its own worktree even though it is
    // nested inside the /root/b repository.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    // Index worktree ids by absolute path so each removal below can target a
    // specific directory.
    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Three worktrees, but only two repositories (a and b).
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the nested script worktree must not remove the /root/b repo.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing the worktree containing the active repository (/root/a) should
    // make the remaining repository (/root/b) active.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the last repository-bearing worktree leaves no active repo.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
11613
/// Verifies the optimistic-staging lifecycle of a diff hunk: it starts
/// unstaged (`HasSecondaryHunk`), is optimistically marked
/// `SecondaryHunkRemovalPending` while the stage operation is in flight,
/// becomes fully staged (`NoSecondaryHunk`) once staging completes, and
/// disappears entirely after HEAD is updated to match the file contents.
/// Uses manual executor ticks (not `run_until_parked`) to observe the
/// in-flight pending state.
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index both contain the committed contents, so the on-disk
    // change ("two" -> "TWO") is an unstaged modification.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // The stage operation is still in flight: the hunk must be in the
    // optimistic "removal pending" state.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
11758
/// Verifies that the `read_only_files` worktree setting marks buffers as
/// read-only when their path matches any configured glob (`**/generated/**`,
/// `**/*.gen.rs`), while non-matching files stay writable.
#[gpui::test]
async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Configure read_only_files setting
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/generated/**".to_string(),
                    "**/*.gen.rs".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
                "types.gen.rs": "// Generated file",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Open a regular file - should be read-write
    let regular_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    regular_buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Regular file should not be read-only");
    });

    // Open a file matching *.gen.rs pattern - should be read-only
    let gen_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
        })
        .await
        .unwrap();

    gen_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File matching *.gen.rs pattern should be read-only"
        );
    });

    // Open a file in generated directory - should be read-only
    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File in generated directory should be read-only"
        );
    });
}
11834
/// Verifies that an explicitly empty `read_only_files` list (the default
/// behavior) leaves every buffer writable, including files under directories
/// that a typical pattern would match.
#[gpui::test]
async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Explicitly set read_only_files to empty (default behavior)
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // All files should be read-write when read_only_files is empty
    let main_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    main_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Files should not be read-only when read_only_files is empty"
        );
    });

    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Generated files should not be read-only when read_only_files is empty"
        );
    });
}
11893
/// Verifies `read_only_files` with lock-file patterns (`**/*.lock`,
/// `**/package-lock.json`): the lock files open read-only while their
/// sibling manifests (Cargo.toml, package.json) stay writable.
#[gpui::test]
async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Configure to make lock files read-only
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/*.lock".to_string(),
                    "**/package-lock.json".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "Cargo.lock": "# Lock file",
            "Cargo.toml": "[package]",
            "package-lock.json": "{}",
            "package.json": "{}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Cargo.lock should be read-only
    let cargo_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.lock"), cx)
        })
        .await
        .unwrap();

    cargo_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "Cargo.lock should be read-only");
    });

    // Cargo.toml should be read-write
    let cargo_toml = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    cargo_toml.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
    });

    // package-lock.json should be read-only
    let package_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package-lock.json"), cx)
        })
        .await
        .unwrap();

    package_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "package-lock.json should be read-only");
    });

    // package.json should be read-write
    let package_json = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package.json"), cx)
        })
        .await
        .unwrap();

    package_json.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "package.json should not be read-only");
    });
}
11972
11973mod disable_ai_settings_tests {
11974 use gpui::TestAppContext;
11975 use project::*;
11976 use settings::{Settings, SettingsStore};
11977
11978 #[gpui::test]
11979 async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
11980 cx.update(|cx| {
11981 settings::init(cx);
11982
11983 // Test 1: Default is false (AI enabled)
11984 assert!(
11985 !DisableAiSettings::get_global(cx).disable_ai,
11986 "Default should allow AI"
11987 );
11988 });
11989
11990 let disable_true = serde_json::json!({
11991 "disable_ai": true
11992 })
11993 .to_string();
11994 let disable_false = serde_json::json!({
11995 "disable_ai": false
11996 })
11997 .to_string();
11998
11999 cx.update_global::<SettingsStore, _>(|store, cx| {
12000 store.set_user_settings(&disable_false, cx).unwrap();
12001 store.set_global_settings(&disable_true, cx).unwrap();
12002 });
12003 cx.update(|cx| {
12004 assert!(
12005 DisableAiSettings::get_global(cx).disable_ai,
12006 "Local false cannot override global true"
12007 );
12008 });
12009
12010 cx.update_global::<SettingsStore, _>(|store, cx| {
12011 store.set_global_settings(&disable_false, cx).unwrap();
12012 store.set_user_settings(&disable_true, cx).unwrap();
12013 });
12014
12015 cx.update(|cx| {
12016 assert!(
12017 DisableAiSettings::get_global(cx).disable_ai,
12018 "Local false cannot override global true"
12019 );
12020 });
12021 }
12022}