1#![allow(clippy::format_collect)]
2
3mod bookmark_store;
4mod color_extractor;
5mod context_server_store;
6mod debugger;
7mod ext_agent_tests;
8mod extension_agent_tests;
9mod git_store;
10mod image_store;
11mod lsp_command;
12mod lsp_store;
13mod manifest_tree;
14mod project_search;
15mod search;
16mod search_history;
17mod signature_help;
18mod task_inventory;
19mod trusted_worktrees;
20mod yarn;
21
22use anyhow::Result;
23use async_trait::async_trait;
24use buffer_diff::{
25 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
26 assert_hunks,
27};
28use collections::{BTreeSet, HashMap, HashSet};
29use encoding_rs;
30use fs::{FakeFs, PathEventKind};
31use futures::{StreamExt, future};
32use git::{
33 GitHostingProviderRegistry,
34 repository::{RepoPath, repo_path},
35 status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
36};
37use git2::RepositoryInitOptions;
38use gpui::{
39 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
40 TestAppContext, UpdateGlobal,
41};
42use itertools::Itertools;
43use language::{
44 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
45 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageAwareStyling,
46 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider,
47 ManifestQuery, OffsetRangeExt, Point, ToPoint, Toolchain, ToolchainList, ToolchainLister,
48 ToolchainMetadata,
49 language_settings::{
50 Formatter, FormatterList, LanguageSettings, LanguageSettingsContent, LineEndingSetting,
51 },
52 markdown_lang, rust_lang, tree_sitter_typescript,
53};
54use lsp::{
55 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
56 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
57 Uri, WillRenameFiles, notification::DidRenameFiles,
58};
59use parking_lot::Mutex;
60use paths::{config_dir, global_gitignore_path, tasks_file};
61use postage::stream::Stream as _;
62use pretty_assertions::{assert_eq, assert_matches};
63use project::{
64 Event, TaskContexts,
65 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
66 search::{SearchQuery, SearchResult},
67 task_store::{TaskSettingsLocation, TaskStore},
68 *,
69};
70use rand::{Rng as _, rngs::StdRng};
71use serde_json::json;
72use settings::SettingsStore;
73#[cfg(not(windows))]
74use std::os;
75use std::{
76 cell::RefCell,
77 env, mem,
78 num::NonZeroU32,
79 ops::Range,
80 path::{Path, PathBuf},
81 rc::Rc,
82 str::FromStr,
83 sync::{Arc, OnceLock, atomic},
84 task::Poll,
85 time::Duration,
86};
87use sum_tree::SumTree;
88use task::{ResolvedTask, ShellKind, TaskContext};
89use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
90use unindent::Unindent as _;
91use util::{
92 TryFutureExt as _, assert_set_eq, maybe, path,
93 paths::{PathMatcher, PathStyle},
94 rel_path::{RelPath, rel_path},
95 test::{TempTree, marked_text_offsets},
96 uri,
97};
98use worktree::WorktreeModelHandle as _;
99
100#[gpui::test]
101async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
102 cx.executor().allow_parking();
103
104 let (tx, mut rx) = futures::channel::mpsc::unbounded();
105 let _thread = std::thread::spawn(move || {
106 #[cfg(not(target_os = "windows"))]
107 std::fs::metadata("/tmp").unwrap();
108 #[cfg(target_os = "windows")]
109 std::fs::metadata("C:/Windows").unwrap();
110 std::thread::sleep(Duration::from_millis(1000));
111 tx.unbounded_send(1).unwrap();
112 });
113 rx.next().await.unwrap();
114}
115
116#[gpui::test]
117async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
118 cx.executor().allow_parking();
119
120 let io_task = smol::unblock(move || {
121 println!("sleeping on thread {:?}", std::thread::current().id());
122 std::thread::sleep(Duration::from_millis(10));
123 1
124 });
125
126 let task = cx.foreground_executor().spawn(async move {
127 io_task.await;
128 });
129
130 task.await;
131}
132
// Verifies that `default_path_list` orders directory worktrees before the
// parent directories contributed by single-file worktrees.
#[gpui::test]
async fn test_default_session_work_dirs_prefers_directory_worktrees_over_single_file_parents(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    // One directory worktree and one single-file worktree under the same root.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir-project": {
                "src": {
                    "main.rs": "fn main() {}"
                }
            },
            "single-file.rs": "fn helper() {}"
        }),
    )
    .await;

    // The single-file worktree is added FIRST, so the assertion below proves
    // preference, not mere insertion order.
    let project = Project::test(
        fs,
        [
            Path::new(path!("/root/single-file.rs")),
            Path::new(path!("/root/dir-project")),
        ],
        cx,
    )
    .await;

    let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
    let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();

    // The directory worktree comes first; the single file contributes its
    // parent directory (`/root`), which is ordered after it.
    assert_eq!(
        ordered_paths,
        vec![
            PathBuf::from(path!("/root/dir-project")),
            PathBuf::from(path!("/root")),
        ]
    );
}
174
175#[gpui::test]
176async fn test_default_session_work_dirs_falls_back_to_home_for_empty_project(
177 cx: &mut gpui::TestAppContext,
178) {
179 init_test(cx);
180
181 let fs = FakeFs::new(cx.executor());
182 let project = Project::test(fs, [], cx).await;
183
184 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
185 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
186
187 assert_eq!(ordered_paths, vec![paths::home_dir().to_path_buf()]);
188}
189
190// NOTE:
191// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
192// we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // This test uses the real filesystem (TempTree + RealFs), so the executor
    // must be allowed to park while waiting on disk I/O.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // Create a symlink to the tree root, plus a symlinked directory inside it.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root path.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        assert_eq!(tree.file_count(), 5);
        // The symlinked directory's entries resolve to the same underlying
        // files as the originals (identical inodes).
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
240
241#[gpui::test]
242async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
243 init_test(cx);
244
245 let dir = TempTree::new(json!({
246 ".editorconfig": r#"
247 root = true
248 [*.rs]
249 indent_style = tab
250 indent_size = 3
251 end_of_line = lf
252 insert_final_newline = true
253 trim_trailing_whitespace = true
254 max_line_length = 120
255 [*.js]
256 tab_width = 10
257 max_line_length = off
258 "#,
259 ".zed": {
260 "settings.json": r#"{
261 "tab_size": 8,
262 "hard_tabs": false,
263 "ensure_final_newline_on_save": false,
264 "remove_trailing_whitespace_on_save": false,
265 "preferred_line_length": 64,
266 "soft_wrap": "editor_width",
267 }"#,
268 },
269 "a.rs": "fn a() {\n A\n}",
270 "b": {
271 ".editorconfig": r#"
272 [*.rs]
273 indent_size = 2
274 max_line_length = off,
275 "#,
276 "b.rs": "fn b() {\n B\n}",
277 },
278 "c.js": "def c\n C\nend",
279 "d": {
280 ".editorconfig": r#"
281 [*.rs]
282 indent_size = 1
283 "#,
284 "d.rs": "fn d() {\n D\n}",
285 },
286 "README.json": "tabs are better\n",
287 }));
288
289 let path = dir.path();
290 let fs = FakeFs::new(cx.executor());
291 fs.insert_tree_from_real_fs(path, path).await;
292 let project = Project::test(fs, [path], cx).await;
293
294 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
295 language_registry.add(js_lang());
296 language_registry.add(json_lang());
297 language_registry.add(rust_lang());
298
299 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
300
301 cx.executor().run_until_parked();
302
303 let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
304 let buffer = project
305 .update(cx, |project, cx| {
306 project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
307 })
308 .await
309 .unwrap();
310 cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
311 };
312
313 let settings_a = settings_for("a.rs", cx).await;
314 let settings_b = settings_for("b/b.rs", cx).await;
315 let settings_c = settings_for("c.js", cx).await;
316 let settings_d = settings_for("d/d.rs", cx).await;
317 let settings_readme = settings_for("README.json", cx).await;
318 // .editorconfig overrides .zed/settings
319 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
320 assert_eq!(settings_a.hard_tabs, true);
321 assert_eq!(settings_a.ensure_final_newline_on_save, true);
322 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
323 assert_eq!(settings_a.line_ending, LineEndingSetting::EnforceLf);
324 assert_eq!(settings_a.preferred_line_length, 120);
325
326 // .editorconfig in b/ overrides .editorconfig in root
327 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
328
329 // .editorconfig in subdirectory overrides .editorconfig in root
330 assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));
331
332 // "indent_size" is not set, so "tab_width" is used
333 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
334
335 // When max_line_length is "off", default to .zed/settings.json
336 assert_eq!(settings_b.preferred_line_length, 64);
337 assert_eq!(settings_c.preferred_line_length, 64);
338
339 // README.md should not be affected by .editorconfig's globe "*.rs"
340 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
341}
342
// Verifies that `.editorconfig` files in ancestor directories OUTSIDE the
// worktree are discovered and layered beneath the worktree's own config.
#[gpui::test]
async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Configs at three levels: grandparent (all files), parent (*.rs),
    // and inside the worktree itself (*.md).
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "parent": {
                ".editorconfig": "[*.rs]\nindent_size = 2\n",
                "worktree": {
                    ".editorconfig": "[*.md]\nindent_size = 3\n",
                    "main.rs": "fn main() {}",
                    "README.md": "# README",
                    "other.txt": "other content",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    language_registry.add(markdown_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    // Opens `path` in the worktree and returns the effective language settings.
    let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
            })
            .await
            .unwrap();
        cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
    };

    let settings_rs = settings_for("main.rs", cx).await;
    let settings_md = settings_for("README.md", cx).await;
    let settings_txt = settings_for("other.txt", cx).await;

    // main.rs gets indent_size = 2 from parent's external .editorconfig
    assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));

    // README.md gets indent_size = 3 from internal worktree .editorconfig
    assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));

    // other.txt gets indent_size = 4 from grandparent's external .editorconfig
    assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
}
397
// Verifies that a `root = true` marker in a worktree-internal `.editorconfig`
// stops upward traversal, shadowing configs higher in the tree.
#[gpui::test]
async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The outer config uses a sentinel value (99) that must NOT leak through.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "src": {
                ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("src/file.rs")), cx)
        })
        .await
        .unwrap();
    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(buffer.read(cx), cx).into_owned();
        // src/file.rs gets indent_size = 2 from src's root config, NOT 99.
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
    });
}
435
// Verifies that a `root = true` marker at the worktree root stops traversal
// into external (outside-the-worktree) ancestor configs.
#[gpui::test]
async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The parent config uses a sentinel value (99) that must NOT leak through.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "worktree": {
                ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
    });
}
476
// Verifies that a `root = true` in an EXTERNAL ancestor config stops traversal
// there, so configs even further up are ignored.
#[gpui::test]
async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Grandparent uses a sentinel value (99); parent declares `root = true`.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "parent": {
                ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
                "worktree": {
                    "file.rs": "fn main() {}",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
519
// Verifies that a single external `.editorconfig` in a shared parent directory
// applies to multiple sibling worktrees in the same project.
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Both worktrees have their own internal configs (which is what triggers
    // external-config discovery) and share the parent's config.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let worktrees: Vec<_> = cx.update(|cx| project.read(cx).worktrees(cx).collect());
    assert_eq!(worktrees.len(), 2);

    for worktree in worktrees {
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
            })
            .await
            .unwrap();

        cx.update(|cx| {
            let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        });
    }
}
575
// Verifies that external ancestor `.editorconfig` files are only consulted
// when the worktree itself contains a `.editorconfig`.
#[gpui::test]
async fn test_external_editorconfig_not_loaded_without_internal_config(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    // Parent has a config (sentinel 99), but the worktree has none.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "worktree": {
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
        // because without an internal .editorconfig, external configs are not loaded
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
618
// Verifies that editing an external `.editorconfig` on disk is noticed (via a
// file watcher) and refreshes the settings of already-known buffers.
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // An empty internal config enables discovery of the parent's external one.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the external config on disk; the watcher should pick this up.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
682
// Verifies that a worktree added to an existing project also discovers
// external `.editorconfig` files in its ancestor directories.
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Both worktrees carry an (empty) internal config so external discovery
    // kicks in; the shared parent defines indent_size = 7.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    // Start the project with only the first worktree.
    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Add the second worktree to the already-running project.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((new_worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
749
// Verifies that removing a worktree drops its external editorconfig state,
// including loaded configs and the file watchers that tracked them.
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    // Inspect the editorconfig store's internals via its test-only accessor.
    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
805
// Verifies reference-counting semantics of a shared external config: removing
// one of two worktrees must NOT drop the config/watcher still used by the other.
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    // Remove only worktree_a; the shared external config must survive.
    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_b.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
898
// Verifies that the `git_hosting_providers` project setting registers a custom
// provider, and that removing the setting unregisters it again.
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare a custom GitLab-style provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // The provider from project settings should now be registered globally.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the project settings; the custom provider must be unregistered.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
963
// Verifies directory-scoped `.zed` settings and tasks: nested `.zed` dirs
// provide their own tab_size and tasks, task sources are merged/ordered, a
// scheduled task moves to the front, and global tasks.json entries are merged in.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Two `.zed` directories: one at the worktree root, one nested under `b/`.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task resolution happens against the active worktree's context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for tasks defined in the worktree-root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("a/a.rs")), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("b/b.rs")), cx)
        })
        .await
        .unwrap();
    cx.update(|cx| {
        let settings_a = LanguageSettings::for_buffer(&buffer_a.read(cx), cx);
        let settings_b = LanguageSettings::for_buffer(&buffer_b.read(cx), cx);

        // `a/` inherits the root `.zed` settings; `b/` has its own `.zed`.
        assert_eq!(settings_a.tab_size.get(), 8);
        assert_eq!(settings_b.tab_size.get(), 2);
    });

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Tasks from the nested `b/.zed` directory are listed before the
    // worktree-root `.zed` tasks.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root `.zed` task as scheduled and add a global tasks.json entry.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store()
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The scheduled task now sorts first; the global tasks.json entry (with its
    // env var) appears after the worktree-local tasks.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
1166
// Verifies that an invalid task variable in a worktree-local `.zed/tasks.json`
// surfaces an `Event::Toast` that names the bad variable and links to the
// tasks documentation.
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we had a chance to setup the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Flipped to `true` by the subscription below once the expected toast arrives.
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        // The toast must identify the local-tasks source, mention the offending
        // variable, and carry a link to the tasks documentation page.
        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1224
// A task referencing `$ZED_WORKTREE_ROOT` resolves only when some context
// (here, the active worktree context) actually supplies that variable.
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // With only an active-item context and no worktree context, the
    // `$ZED_WORKTREE_ROOT` variable cannot be substituted, so no task resolves.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Once the active worktree context provides `WorktreeRoot`, the task
    // resolves and the variable is substituted into the command line.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
1316
// Two Python subprojects share a single server instance until a distinct
// toolchain is activated for one of them, at which point a second instance of
// the same server is spawned for that subproject.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider mimicking Python project discovery: a buffer's
    // rooting point is the outermost `pyproject.toml` accompanied by a known
    // workspace lockfile, falling back to the innermost `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            const WORKSPACE_LOCKFILES: &[&str] =
                &["uv.lock", "poetry.lock", "pdm.lock", "Pipfile.lock"];

            let mut innermost_pyproject = None;
            let mut outermost_workspace_root = None;

            // Walk up from the queried path, visiting at most `depth` ancestors.
            for path in path.ancestors().take(depth) {
                let pyproject_path = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&pyproject_path, Some(false)) {
                    if innermost_pyproject.is_none() {
                        innermost_pyproject = Some(Arc::from(path));
                    }

                    let has_lockfile = WORKSPACE_LOCKFILES.iter().any(|lockfile| {
                        let lockfile_path = path.join(rel_path(lockfile));
                        delegate.exists(&lockfile_path, Some(false))
                    });
                    if has_lockfile {
                        outermost_workspace_root = Some(Arc::from(path));
                    }
                }
            }

            outermost_workspace_root.or(innermost_pyproject)
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two Python subprojects in one worktree, each with its own venv and
    // `pyproject.toml`, both configured to use the "ty" language server.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the same server instance,
    // since nothing differentiates the two subprojects yet.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    // No toolchain is active until one is explicitly chosen.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // Activating a distinct toolchain for project-b should force a separate
    // server instance for buffers rooted there.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1534
// End-to-end coverage of language server lifecycle management: startup on
// buffer open, capability propagation into buffers, per-language routing of
// change/save/close notifications, rename handling (including extension
// changes that migrate a buffer between servers), and server restarts.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server: completion triggers "." and "::", save notifications on.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server: completion trigger ":", save notifications on.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // Same-extension rename: close old URI, reopen under the new URI on the
    // same (Rust) server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared after the
    // language-changing rename below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::min_max_range_for_buffer(buffer.remote_id()),
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before being replaced.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1937
1938#[gpui::test]
1939async fn test_late_lsp_adapter_registration(cx: &mut gpui::TestAppContext) {
1940 init_test(cx);
1941
1942 let fs = FakeFs::new(cx.executor());
1943 fs.insert_tree(
1944 path!("/dir"),
1945 json!({
1946 "test.rs": "const A: i32 = 1;",
1947 }),
1948 )
1949 .await;
1950
1951 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1952 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1953
1954 // Add the language first so the buffer gets assigned a language.
1955 language_registry.add(rust_lang());
1956 cx.executor().run_until_parked();
1957
1958 // Open a buffer — it gets assigned the Rust language but there is no LSP adapter yet.
1959 let (rust_buffer, _handle) = project
1960 .update(cx, |project, cx| {
1961 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
1962 })
1963 .await
1964 .unwrap();
1965
1966 rust_buffer.update(cx, |buffer, _| {
1967 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
1968 });
1969
1970 // Now register the LSP adapter late (simulating an extension loading after startup).
1971 let mut fake_rust_servers = language_registry.register_fake_lsp(
1972 "Rust",
1973 FakeLspAdapter {
1974 name: "the-rust-language-server",
1975 capabilities: lsp::ServerCapabilities {
1976 completion_provider: Some(lsp::CompletionOptions {
1977 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
1978 ..Default::default()
1979 }),
1980 ..Default::default()
1981 },
1982 ..Default::default()
1983 },
1984 );
1985 cx.executor().run_until_parked();
1986
1987 // The language server should start and receive a DidOpenTextDocument notification
1988 // for the already-open buffer.
1989 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1990 assert_eq!(
1991 fake_rust_server
1992 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1993 .await
1994 .text_document,
1995 lsp::TextDocumentItem {
1996 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1997 version: 0,
1998 text: "const A: i32 = 1;".to_string(),
1999 language_id: "rust".to_string(),
2000 }
2001 );
2002
2003 // The buffer should be configured with the language server's capabilities.
2004 rust_buffer.update(cx, |buffer, _| {
2005 assert_eq!(
2006 buffer
2007 .completion_triggers()
2008 .iter()
2009 .cloned()
2010 .collect::<Vec<_>>(),
2011 &[".".to_string(), "::".to_string()]
2012 );
2013 });
2014}
2015
// A configured LSP binary path that exists relative to the worktree root is
// resolved against that root; a path that does not exist is left as-is so the
// OS can look it up on the PATH env var.
#[gpui::test]
async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let settings_json_contents = json!({
        "languages": {
            "Rust": {
                "language_servers": ["my_fake_lsp", "lsp_on_path"]
            }
        },
        "lsp": {
            "my_fake_lsp": {
                "binary": {
                    // file exists, so this is treated as a relative path
                    "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
                }
            },
            "lsp_on_path": {
                "binary": {
                    // file doesn't exist, so it will fall back on PATH env var
                    "path": path!("lsp_on_path.exe").to_string(),
                }
            }
        },
    });

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": settings_json_contents.to_string(),
            },
            ".relative_path": {
                "to": {
                    // NOTE(review): this fixture name differs from the configured
                    // binary (`my_fake_lsp_binary.exe`), yet the test passes —
                    // confirm what the relative-path existence check actually
                    // inspects, or align the two names.
                    "my_fake_lsp.exe": "",
                },
            },
            "src": {
                "main.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let mut my_fake_lsp = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "my_fake_lsp",
            ..Default::default()
        },
    );
    let mut lsp_on_path = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "lsp_on_path",
            ..Default::default()
        },
    );

    cx.run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    // Relative path: resolved against the worktree root.
    let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
    assert_eq!(
        lsp_path.to_string_lossy(),
        path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
    );

    // Nonexistent path: passed through unchanged for PATH lookup.
    let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
    assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
}
2099
2100#[gpui::test]
2101async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2102 init_test(cx);
2103
2104 let settings_json_contents = json!({
2105 "languages": {
2106 "Rust": {
2107 "language_servers": ["tilde_lsp"]
2108 }
2109 },
2110 "lsp": {
2111 "tilde_lsp": {
2112 "binary": {
2113 "path": "~/.local/bin/rust-analyzer",
2114 }
2115 }
2116 },
2117 });
2118
2119 let fs = FakeFs::new(cx.executor());
2120 fs.insert_tree(
2121 path!("/root"),
2122 json!({
2123 ".zed": {
2124 "settings.json": settings_json_contents.to_string(),
2125 },
2126 "src": {
2127 "main.rs": "fn main() {}",
2128 }
2129 }),
2130 )
2131 .await;
2132
2133 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2134 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2135 language_registry.add(rust_lang());
2136
2137 let mut tilde_lsp = language_registry.register_fake_lsp(
2138 "Rust",
2139 FakeLspAdapter {
2140 name: "tilde_lsp",
2141 ..Default::default()
2142 },
2143 );
2144 cx.run_until_parked();
2145
2146 project
2147 .update(cx, |project, cx| {
2148 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2149 })
2150 .await
2151 .unwrap();
2152
2153 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2154 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2155 assert_eq!(
2156 lsp_path, expected_path,
2157 "Tilde path should expand to home directory"
2158 );
2159}
2160
// A filesystem Rescan event on a path the server watches via
// `workspace/didChangeWatchedFiles` must reach the server as a plain
// `FileChangeType::CHANGED` event.
#[gpui::test]
async fn test_rescan_fs_change_is_reported_to_language_servers_as_changed(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, _lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Opening a Rust buffer starts the fake server.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The server registers a watcher for `Cargo.lock` only, and we collect
    // every change notification it subsequently receives.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(
            lsp::RegistrationParams {
                registrations: vec![lsp::Registration {
                    id: Default::default(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: serde_json::to_value(
                        lsp::DidChangeWatchedFilesRegistrationOptions {
                            watchers: vec![lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            }],
                        },
                    )
                    .ok(),
                }],
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
        }
    });

    // Nothing has changed yet, so no events should have been delivered.
    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);

    // A Rescan event on the watched path surfaces as a CHANGED file event.
    fs.emit_fs_event(path!("/the-root/Cargo.lock"), Some(PathEventKind::Rescan));
    cx.executor().run_until_parked();

    assert_eq!(
        &*file_changes.lock(),
        &[lsp::FileEvent {
            uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
            typ: lsp::FileChangeType::CHANGED,
        }]
    );
}
2251
2252#[gpui::test]
2253async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2254 init_test(cx);
2255
2256 let fs = FakeFs::new(cx.executor());
2257 fs.insert_tree(
2258 path!("/the-root"),
2259 json!({
2260 ".gitignore": "target\n",
2261 "Cargo.lock": "",
2262 "src": {
2263 "a.rs": "",
2264 "b.rs": "",
2265 },
2266 "target": {
2267 "x": {
2268 "out": {
2269 "x.rs": ""
2270 }
2271 },
2272 "y": {
2273 "out": {
2274 "y.rs": "",
2275 }
2276 },
2277 "z": {
2278 "out": {
2279 "z.rs": ""
2280 }
2281 }
2282 }
2283 }),
2284 )
2285 .await;
2286 fs.insert_tree(
2287 path!("/the-registry"),
2288 json!({
2289 "dep1": {
2290 "src": {
2291 "dep1.rs": "",
2292 }
2293 },
2294 "dep2": {
2295 "src": {
2296 "dep2.rs": "",
2297 }
2298 },
2299 }),
2300 )
2301 .await;
2302 fs.insert_tree(
2303 path!("/the/stdlib"),
2304 json!({
2305 "LICENSE": "",
2306 "src": {
2307 "string.rs": "",
2308 }
2309 }),
2310 )
2311 .await;
2312
2313 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2314 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2315 (project.languages().clone(), project.lsp_store())
2316 });
2317 language_registry.add(rust_lang());
2318 let mut fake_servers = language_registry.register_fake_lsp(
2319 "Rust",
2320 FakeLspAdapter {
2321 name: "the-language-server",
2322 ..Default::default()
2323 },
2324 );
2325
2326 cx.executor().run_until_parked();
2327
2328 // Start the language server by opening a buffer with a compatible file extension.
2329 project
2330 .update(cx, |project, cx| {
2331 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2332 })
2333 .await
2334 .unwrap();
2335
2336 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2337 project.update(cx, |project, cx| {
2338 let worktree = project.worktrees(cx).next().unwrap();
2339 assert_eq!(
2340 worktree
2341 .read(cx)
2342 .snapshot()
2343 .entries(true, 0)
2344 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2345 .collect::<Vec<_>>(),
2346 &[
2347 ("", false),
2348 (".gitignore", false),
2349 ("Cargo.lock", false),
2350 ("src", false),
2351 ("src/a.rs", false),
2352 ("src/b.rs", false),
2353 ("target", true),
2354 ]
2355 );
2356 });
2357
2358 let prev_read_dir_count = fs.read_dir_call_count();
2359
2360 let fake_server = fake_servers.next().await.unwrap();
2361 cx.executor().run_until_parked();
2362 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2363 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2364 id
2365 });
2366
2367 // Simulate jumping to a definition in a dependency outside of the worktree.
2368 let _out_of_worktree_buffer = project
2369 .update(cx, |project, cx| {
2370 project.open_local_buffer_via_lsp(
2371 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2372 server_id,
2373 cx,
2374 )
2375 })
2376 .await
2377 .unwrap();
2378
2379 // Keep track of the FS events reported to the language server.
2380 let file_changes = Arc::new(Mutex::new(Vec::new()));
2381 fake_server
2382 .request::<lsp::request::RegisterCapability>(
2383 lsp::RegistrationParams {
2384 registrations: vec![lsp::Registration {
2385 id: Default::default(),
2386 method: "workspace/didChangeWatchedFiles".to_string(),
2387 register_options: serde_json::to_value(
2388 lsp::DidChangeWatchedFilesRegistrationOptions {
2389 watchers: vec![
2390 lsp::FileSystemWatcher {
2391 glob_pattern: lsp::GlobPattern::String(
2392 path!("/the-root/Cargo.toml").to_string(),
2393 ),
2394 kind: None,
2395 },
2396 lsp::FileSystemWatcher {
2397 glob_pattern: lsp::GlobPattern::String(
2398 path!("/the-root/src/*.{rs,c}").to_string(),
2399 ),
2400 kind: None,
2401 },
2402 lsp::FileSystemWatcher {
2403 glob_pattern: lsp::GlobPattern::String(
2404 path!("/the-root/target/y/**/*.rs").to_string(),
2405 ),
2406 kind: None,
2407 },
2408 lsp::FileSystemWatcher {
2409 glob_pattern: lsp::GlobPattern::String(
2410 path!("/the/stdlib/src/**/*.rs").to_string(),
2411 ),
2412 kind: None,
2413 },
2414 lsp::FileSystemWatcher {
2415 glob_pattern: lsp::GlobPattern::String(
2416 path!("**/Cargo.lock").to_string(),
2417 ),
2418 kind: None,
2419 },
2420 ],
2421 },
2422 )
2423 .ok(),
2424 }],
2425 },
2426 DEFAULT_LSP_REQUEST_TIMEOUT,
2427 )
2428 .await
2429 .into_response()
2430 .unwrap();
2431 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2432 let file_changes = file_changes.clone();
2433 move |params, _| {
2434 let mut file_changes = file_changes.lock();
2435 file_changes.extend(params.changes);
2436 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2437 }
2438 });
2439
2440 cx.executor().run_until_parked();
2441 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2442 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2443
2444 let mut new_watched_paths = fs.watched_paths();
2445 new_watched_paths.retain(|path| {
2446 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2447 });
2448 assert_eq!(
2449 &new_watched_paths,
2450 &[
2451 Path::new(path!("/the-root")),
2452 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2453 Path::new(path!("/the/stdlib/src"))
2454 ]
2455 );
2456
2457 // Now the language server has asked us to watch an ignored directory path,
2458 // so we recursively load it.
2459 project.update(cx, |project, cx| {
2460 let worktree = project.visible_worktrees(cx).next().unwrap();
2461 assert_eq!(
2462 worktree
2463 .read(cx)
2464 .snapshot()
2465 .entries(true, 0)
2466 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2467 .collect::<Vec<_>>(),
2468 &[
2469 ("", false),
2470 (".gitignore", false),
2471 ("Cargo.lock", false),
2472 ("src", false),
2473 ("src/a.rs", false),
2474 ("src/b.rs", false),
2475 ("target", true),
2476 ("target/x", true),
2477 ("target/y", true),
2478 ("target/y/out", true),
2479 ("target/y/out/y.rs", true),
2480 ("target/z", true),
2481 ]
2482 );
2483 });
2484
2485 // Perform some file system mutations, two of which match the watched patterns,
2486 // and one of which does not.
2487 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2488 .await
2489 .unwrap();
2490 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2491 .await
2492 .unwrap();
2493 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2494 .await
2495 .unwrap();
2496 fs.create_file(
2497 path!("/the-root/target/x/out/x2.rs").as_ref(),
2498 Default::default(),
2499 )
2500 .await
2501 .unwrap();
2502 fs.create_file(
2503 path!("/the-root/target/y/out/y2.rs").as_ref(),
2504 Default::default(),
2505 )
2506 .await
2507 .unwrap();
2508 fs.save(
2509 path!("/the-root/Cargo.lock").as_ref(),
2510 &"".into(),
2511 Default::default(),
2512 )
2513 .await
2514 .unwrap();
2515 fs.save(
2516 path!("/the-stdlib/LICENSE").as_ref(),
2517 &"".into(),
2518 Default::default(),
2519 )
2520 .await
2521 .unwrap();
2522 fs.save(
2523 path!("/the/stdlib/src/string.rs").as_ref(),
2524 &"".into(),
2525 Default::default(),
2526 )
2527 .await
2528 .unwrap();
2529
2530 // The language server receives events for the FS mutations that match its watch patterns.
2531 cx.executor().run_until_parked();
2532 assert_eq!(
2533 &*file_changes.lock(),
2534 &[
2535 lsp::FileEvent {
2536 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2537 typ: lsp::FileChangeType::CHANGED,
2538 },
2539 lsp::FileEvent {
2540 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2541 typ: lsp::FileChangeType::DELETED,
2542 },
2543 lsp::FileEvent {
2544 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2545 typ: lsp::FileChangeType::CREATED,
2546 },
2547 lsp::FileEvent {
2548 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2549 typ: lsp::FileChangeType::CREATED,
2550 },
2551 lsp::FileEvent {
2552 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2553 typ: lsp::FileChangeType::CHANGED,
2554 },
2555 ]
2556 );
2557}
2558
2559#[gpui::test]
2560async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2561 init_test(cx);
2562
2563 let fs = FakeFs::new(cx.executor());
2564 fs.insert_tree(
2565 path!("/dir"),
2566 json!({
2567 "a.rs": "let a = 1;",
2568 "b.rs": "let b = 2;"
2569 }),
2570 )
2571 .await;
2572
2573 let project = Project::test(
2574 fs,
2575 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2576 cx,
2577 )
2578 .await;
2579 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2580
2581 let buffer_a = project
2582 .update(cx, |project, cx| {
2583 project.open_local_buffer(path!("/dir/a.rs"), cx)
2584 })
2585 .await
2586 .unwrap();
2587 let buffer_b = project
2588 .update(cx, |project, cx| {
2589 project.open_local_buffer(path!("/dir/b.rs"), cx)
2590 })
2591 .await
2592 .unwrap();
2593
2594 lsp_store.update(cx, |lsp_store, cx| {
2595 lsp_store
2596 .update_diagnostics(
2597 LanguageServerId(0),
2598 lsp::PublishDiagnosticsParams {
2599 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2600 version: None,
2601 diagnostics: vec![lsp::Diagnostic {
2602 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2603 severity: Some(lsp::DiagnosticSeverity::ERROR),
2604 message: "error 1".to_string(),
2605 ..Default::default()
2606 }],
2607 },
2608 None,
2609 DiagnosticSourceKind::Pushed,
2610 &[],
2611 cx,
2612 )
2613 .unwrap();
2614 lsp_store
2615 .update_diagnostics(
2616 LanguageServerId(0),
2617 lsp::PublishDiagnosticsParams {
2618 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2619 version: None,
2620 diagnostics: vec![lsp::Diagnostic {
2621 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2622 severity: Some(DiagnosticSeverity::WARNING),
2623 message: "error 2".to_string(),
2624 ..Default::default()
2625 }],
2626 },
2627 None,
2628 DiagnosticSourceKind::Pushed,
2629 &[],
2630 cx,
2631 )
2632 .unwrap();
2633 });
2634
2635 buffer_a.update(cx, |buffer, _| {
2636 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2637 assert_eq!(
2638 chunks
2639 .iter()
2640 .map(|(s, d)| (s.as_str(), *d))
2641 .collect::<Vec<_>>(),
2642 &[
2643 ("let ", None),
2644 ("a", Some(DiagnosticSeverity::ERROR)),
2645 (" = 1;", None),
2646 ]
2647 );
2648 });
2649 buffer_b.update(cx, |buffer, _| {
2650 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2651 assert_eq!(
2652 chunks
2653 .iter()
2654 .map(|(s, d)| (s.as_str(), *d))
2655 .collect::<Vec<_>>(),
2656 &[
2657 ("let ", None),
2658 ("b", Some(DiagnosticSeverity::WARNING)),
2659 (" = 2;", None),
2660 ]
2661 );
2662 });
2663}
2664
2665#[gpui::test]
2666async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2667 init_test(cx);
2668
2669 let fs = FakeFs::new(cx.executor());
2670 fs.insert_tree(
2671 path!("/root"),
2672 json!({
2673 "dir": {
2674 ".git": {
2675 "HEAD": "ref: refs/heads/main",
2676 },
2677 ".gitignore": "b.rs",
2678 "a.rs": "let a = 1;",
2679 "b.rs": "let b = 2;",
2680 },
2681 "other.rs": "let b = c;"
2682 }),
2683 )
2684 .await;
2685
2686 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2687 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2688 let (worktree, _) = project
2689 .update(cx, |project, cx| {
2690 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2691 })
2692 .await
2693 .unwrap();
2694 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2695
2696 let (worktree, _) = project
2697 .update(cx, |project, cx| {
2698 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2699 })
2700 .await
2701 .unwrap();
2702 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2703
2704 let server_id = LanguageServerId(0);
2705 lsp_store.update(cx, |lsp_store, cx| {
2706 lsp_store
2707 .update_diagnostics(
2708 server_id,
2709 lsp::PublishDiagnosticsParams {
2710 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2711 version: None,
2712 diagnostics: vec![lsp::Diagnostic {
2713 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2714 severity: Some(lsp::DiagnosticSeverity::ERROR),
2715 message: "unused variable 'b'".to_string(),
2716 ..Default::default()
2717 }],
2718 },
2719 None,
2720 DiagnosticSourceKind::Pushed,
2721 &[],
2722 cx,
2723 )
2724 .unwrap();
2725 lsp_store
2726 .update_diagnostics(
2727 server_id,
2728 lsp::PublishDiagnosticsParams {
2729 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2730 version: None,
2731 diagnostics: vec![lsp::Diagnostic {
2732 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2733 severity: Some(lsp::DiagnosticSeverity::ERROR),
2734 message: "unknown variable 'c'".to_string(),
2735 ..Default::default()
2736 }],
2737 },
2738 None,
2739 DiagnosticSourceKind::Pushed,
2740 &[],
2741 cx,
2742 )
2743 .unwrap();
2744 });
2745
2746 let main_ignored_buffer = project
2747 .update(cx, |project, cx| {
2748 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2749 })
2750 .await
2751 .unwrap();
2752 main_ignored_buffer.update(cx, |buffer, _| {
2753 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2754 assert_eq!(
2755 chunks
2756 .iter()
2757 .map(|(s, d)| (s.as_str(), *d))
2758 .collect::<Vec<_>>(),
2759 &[
2760 ("let ", None),
2761 ("b", Some(DiagnosticSeverity::ERROR)),
2762 (" = 2;", None),
2763 ],
2764 "Gigitnored buffers should still get in-buffer diagnostics",
2765 );
2766 });
2767 let other_buffer = project
2768 .update(cx, |project, cx| {
2769 project.open_buffer((other_worktree_id, rel_path("")), cx)
2770 })
2771 .await
2772 .unwrap();
2773 other_buffer.update(cx, |buffer, _| {
2774 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2775 assert_eq!(
2776 chunks
2777 .iter()
2778 .map(|(s, d)| (s.as_str(), *d))
2779 .collect::<Vec<_>>(),
2780 &[
2781 ("let b = ", None),
2782 ("c", Some(DiagnosticSeverity::ERROR)),
2783 (";", None),
2784 ],
2785 "Buffers from hidden projects should still get in-buffer diagnostics"
2786 );
2787 });
2788
2789 project.update(cx, |project, cx| {
2790 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2791 assert_eq!(
2792 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2793 vec![(
2794 ProjectPath {
2795 worktree_id: main_worktree_id,
2796 path: rel_path("b.rs").into(),
2797 },
2798 server_id,
2799 DiagnosticSummary {
2800 error_count: 1,
2801 warning_count: 0,
2802 }
2803 )]
2804 );
2805 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2806 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2807 });
2808}
2809
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Token the fake adapter advertises as its disk-based-diagnostics progress
    // token; progress under it should surface as DiskBasedDiagnostics* events.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    // The first project event should announce the newly-started server.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress under the disk-based token emits the "started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Diagnostics published while the progress is in flight still produce a
    // DiagnosticsUpdated event for the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the progress token emits the matching "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the diagnosed file afterwards shows the pushed diagnostic in-buffer.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second no-op publish must produce no additional event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2945
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed and the replacement gets a fresh id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The still-open buffer is re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
3047
3048#[gpui::test]
3049async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
3050 init_test(cx);
3051
3052 let fs = FakeFs::new(cx.executor());
3053 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
3054
3055 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3056
3057 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3058 language_registry.add(rust_lang());
3059 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3060
3061 let (buffer, _) = project
3062 .update(cx, |project, cx| {
3063 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3064 })
3065 .await
3066 .unwrap();
3067
3068 // Publish diagnostics
3069 let fake_server = fake_servers.next().await.unwrap();
3070 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3071 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3072 version: None,
3073 diagnostics: vec![lsp::Diagnostic {
3074 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3075 severity: Some(lsp::DiagnosticSeverity::ERROR),
3076 message: "the message".to_string(),
3077 ..Default::default()
3078 }],
3079 });
3080
3081 cx.executor().run_until_parked();
3082 buffer.update(cx, |buffer, _| {
3083 assert_eq!(
3084 buffer
3085 .snapshot()
3086 .diagnostics_in_range::<_, usize>(0..1, false)
3087 .map(|entry| entry.diagnostic.message.clone())
3088 .collect::<Vec<_>>(),
3089 ["the message".to_string()]
3090 );
3091 });
3092 project.update(cx, |project, cx| {
3093 assert_eq!(
3094 project.diagnostic_summary(false, cx),
3095 DiagnosticSummary {
3096 error_count: 1,
3097 warning_count: 0,
3098 }
3099 );
3100 });
3101
3102 project.update(cx, |project, cx| {
3103 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3104 });
3105
3106 // The diagnostics are cleared.
3107 cx.executor().run_until_parked();
3108 buffer.update(cx, |buffer, _| {
3109 assert_eq!(
3110 buffer
3111 .snapshot()
3112 .diagnostics_in_range::<_, usize>(0..1, false)
3113 .map(|entry| entry.diagnostic.message.clone())
3114 .collect::<Vec<_>>(),
3115 Vec::<String>::new(),
3116 );
3117 });
3118 project.update(cx, |project, cx| {
3119 assert_eq!(
3120 project.diagnostic_summary(false, cx),
3121 DiagnosticSummary {
3122 error_count: 0,
3123 warning_count: 0,
3124 }
3125 );
3126 });
3127}
3128
3129#[gpui::test]
3130async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
3131 init_test(cx);
3132
3133 let fs = FakeFs::new(cx.executor());
3134 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
3135
3136 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3137 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3138
3139 language_registry.add(rust_lang());
3140 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3141
3142 let (buffer, _handle) = project
3143 .update(cx, |project, cx| {
3144 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3145 })
3146 .await
3147 .unwrap();
3148
3149 // Before restarting the server, report diagnostics with an unknown buffer version.
3150 let fake_server = fake_servers.next().await.unwrap();
3151 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3152 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3153 version: Some(10000),
3154 diagnostics: Vec::new(),
3155 });
3156 cx.executor().run_until_parked();
3157 project.update(cx, |project, cx| {
3158 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3159 });
3160
3161 let mut fake_server = fake_servers.next().await.unwrap();
3162 let notification = fake_server
3163 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3164 .await
3165 .text_document;
3166 assert_eq!(notification.version, 0);
3167}
3168
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First token is explicitly non-cancellable; it must NOT receive a cancel
    // notification below.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second token is cancellable; it is the one we expect to be cancelled.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    // Ask the project to cancel all in-flight work for this buffer's servers.
    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Only the cancellable token should produce a WorkDoneProgressCancel.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3240
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two independent fake servers, one per language, so we can toggle them
    // individually via settings.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language's server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The disabled server is shut down (receives Exit); the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance re-opens the still-open buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JS server is shut down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3358
// Verifies that diagnostics published by a language server against an *older*
// document version are translated through the buffer edits made since that
// version, that overlapping diagnostics of different severities are chunked
// correctly, and that out-of-order diagnostic ranges are handled.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Diagnostics with source "disk" are treated as disk-based below
            // (see the `is_disk_based: true` assertions).
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        // Deliberately stale version: these positions refer to the buffer
        // *before* the "\n\n" edit above, so they must be translated forward.
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    // Originally published at row 1; shifted down 2 rows by the edit.
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        // NOTE(review): group ids appear to be assigned in
                        // publish order across the test — TODO confirm.
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        // Full-buffer chunking: each diagnostic span becomes its own chunk.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range clips diagnostic chunks at its boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            // WARNING range fully contains the ERROR range above.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // Where the diagnostics overlap, the higher severity (ERROR) wins;
        // the rest of the WARNING span keeps its own severity.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            // The row-1 diagnostic is listed *before* the row-0 one.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Diagnostics come back sorted by position, with positions adjusted
        // for the three edits made above.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
3650
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// extended to cover an adjacent character so it remains visible.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Inject diagnostics directly into the LSP store (no fake server needed):
    // both entries have empty (zero-width) ranges.
    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from(path!("/dir/a.rs")),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            // Empty range in the middle of line 0 (before ";").
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            // Empty range at the end of line 1.
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
3728
// Verifies that diagnostics reported by two different language servers for the
// same file are counted independently in the project-wide summary.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    lsp_store.update(cx, |lsp_store, cx| {
        // Server 0 reports one error on the same range...
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // ...and server 1 reports another error on the identical range.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both errors are counted, even though they cover the same range.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
3789
// Verifies that deleting a file from disk removes that file's diagnostics from
// the project-wide summary, while other files' diagnostics are retained.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_worktree_entry_removal(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one", "b.rs": "two" }))
        .await;

    let project = Project::test(fs.clone(), [Path::new(path!("/dir"))], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    lsp_store.update(cx, |lsp_store, cx| {
        // One error in a.rs, one warning in b.rs, both from the same server.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "error in a".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new(path!("/dir/b.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        is_primary: true,
                        message: "warning in b".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 1,
            }
        );
    });

    // Remove a.rs on disk; the resulting worktree event should drop its
    // diagnostics from the summary.
    fs.remove_file(path!("/dir/a.rs").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();

    lsp_store.update(cx, |lsp_store, cx| {
        // Only b.rs's warning remains.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 1,
            },
        );
    });
}
3867
// Verifies that restarting a buffer's language servers clears the diagnostics
// the old server instance had published, and that a `DiagnosticsUpdated`
// event is emitted in the process.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_server_restart(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // The first server instance publishes one error.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 1)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error before restart".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Subscribe before restarting so we observe the events it produces.
    let mut events = cx.events(&project);

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });
    cx.executor().run_until_parked();

    // Drain all already-queued events (non-blocking via `now_or_never`) and
    // check that at least one `DiagnosticsUpdated` was among them.
    let mut received_diagnostics_updated = false;
    while let Some(Some(event)) =
        futures::FutureExt::now_or_never(futures::StreamExt::next(&mut events))
    {
        if matches!(event, Event::DiagnosticsUpdated { .. }) {
            received_diagnostics_updated = true;
        }
    }
    assert!(
        received_diagnostics_updated,
        "DiagnosticsUpdated event should be emitted when a language server is stopped"
    );

    // The old server's diagnostics are gone after the restart.
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
3941
// Verifies that reloading a buffer after its file changes on disk triggers a
// fresh document-diagnostics pull from a server that advertises pull
// diagnostics support.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_buffer_reload(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Counts how many times the server receives a DocumentDiagnosticRequest.
    let pull_count = Arc::new(atomic::AtomicUsize::new(0));
    let closure_pull_count = pull_count.clone();
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // Advertise pull-diagnostics support so the client issues
                // DocumentDiagnosticRequests.
                diagnostic_provider: Some(lsp::DiagnosticServerCapabilities::Options(
                    lsp::DiagnosticOptions {
                        identifier: Some("test-reload".to_string()),
                        inter_file_dependencies: true,
                        workspace_diagnostics: false,
                        work_done_progress_options: Default::default(),
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            initializer: Some(Box::new(move |fake_server| {
                let pull_count = closure_pull_count.clone();
                // Every pull request bumps the counter and returns an empty
                // full report.
                fake_server.set_request_handler::<lsp::request::DocumentDiagnosticRequest, _, _>(
                    move |_, _| {
                        let pull_count = pull_count.clone();
                        async move {
                            pull_count.fetch_add(1, atomic::Ordering::SeqCst);
                            Ok(lsp::DocumentDiagnosticReportResult::Report(
                                lsp::DocumentDiagnosticReport::Full(
                                    lsp::RelatedFullDocumentDiagnosticReport {
                                        related_documents: None,
                                        full_document_diagnostic_report:
                                            lsp::FullDocumentDiagnosticReport {
                                                result_id: None,
                                                items: Vec::new(),
                                            },
                                    },
                                ),
                            ))
                        }
                    },
                );
            })),
            ..FakeLspAdapter::default()
        },
    );

    let (_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Publish initial diagnostics via the fake server.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 3)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error in a".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    let pulls_before = pull_count.load(atomic::Ordering::SeqCst);

    // Change the file on disk. The FS event triggers buffer reload,
    // which in turn triggers pull_diagnostics_for_buffer.
    fs.save(
        path!("/dir/a.rs").as_ref(),
        &"fixed content".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    let pulls_after = pull_count.load(atomic::Ordering::SeqCst);
    assert!(
        pulls_after > pulls_before,
        "Expected document diagnostic pull after buffer reload (before={pulls_before}, after={pulls_after})"
    );
}
4049
// Verifies that LSP text edits computed against an older document version are
// translated through the buffer edits made since then, so they apply cleanly
// to the current buffer contents.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Remember the version the server "saw"; the edits below are expressed
    // against this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP positions below are in the *old* version's coordinates; passing
    // `lsp_document_version` lets edits_from_lsp translate them forward.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        // The user's interleaved comments survive, and the server's edits land
        // in the right (translated) places.
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
4204
// Verifies that a large LSP "diff" expressing a small logical change (delete
// everything and re-insert most of it) is minimized by edits_from_lsp into
// just the genuinely-changed spans.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four raw LSP edits collapse to two minimal ones: the import
        // rewrite and the removal of the now-duplicated `use a::c;` line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4315
// Verifies that edits_from_lsp tolerates a replacement followed by an
// insertion at the same position — an ordering the LSP spec forbids but
// which real servers emit.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The insertion ends up before the (no-op) replacement.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
4371
// Verifies that edits_from_lsp normalizes malformed server edits: unordered
// edits, inverted ranges, and ranges past the end of the document.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (99, 0) is past the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same minimized result as the well-formed variant of this diff.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4478
4479fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4480 buffer: &Buffer,
4481 range: Range<T>,
4482) -> Vec<(String, Option<DiagnosticSeverity>)> {
4483 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4484 for chunk in buffer.snapshot().chunks(
4485 range,
4486 LanguageAwareStyling {
4487 tree_sitter: true,
4488 diagnostics: true,
4489 },
4490 ) {
4491 if chunks
4492 .last()
4493 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4494 {
4495 chunks.last_mut().unwrap().0.push_str(chunk.text);
4496 } else {
4497 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4498 }
4499 }
4500 chunks
4501}
4502
/// Verifies go-to-definition: when the LSP resolves a definition to a file
/// outside the project's visible worktrees, that file is opened in a new
/// *invisible* worktree, which is released again once the definition result
/// is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` exists on disk but is not
    // inside any worktree yet.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server answers the definition request with a location in `a.rs`.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_recv().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // `a.rs` was opened in a new, invisible worktree to back the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition result releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4603
/// Verifies that when a completion item carries an explicit `text_edit`,
/// its range and new text take precedence over `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Request completions at the end of the buffer; the handler below is
    // installed afterwards and answers this in-flight request.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item's text_edit replaces the trailing "fqn" (last 3 characters).
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // Both the new text and the replace range must come from the text_edit.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4687
/// Verifies completion handling when the server supplies a default
/// `edit_range` in `CompletionList.item_defaults` instead of per-item
/// `text_edit`s: the new text comes from `text_edit_text` when present,
/// otherwise from the item's `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Handler is installed after the request is issued; it answers the
        // in-flight completion request.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covering the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // text_edit_text is used as the new text; range comes from the default.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit_range present, the label (not insert_text)
        // supplies the new text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4825
/// Verifies completion handling when the server supplies neither a
/// `text_edit` nor a default `edit_range`: the replace range is inferred
/// from the word surrounding the completion position, and the new text
/// falls back from `insert_text` to `label`.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // insert_text wins over the label; the replace range covers the
    // word before the cursor ("fqn", 3 characters).
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Completion position is just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // The label is the fallback new text; the range covers "cmp".
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4932
/// Verifies that carriage returns in a completion's `insert_text` (both
/// bare `\r` and `\r\n`) are normalized to `\n` before insertion.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert_text mixes a bare CR and a CRLF.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both line-ending styles must be normalized to LF.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
5001
/// Verifies that `supports_range_formatting` consults only the language
/// servers attached to the given buffer: the TypeScript server advertises
/// range formatting (supported), while the Rust server advertises only
/// whole-document formatting (not supported).
#[gpui::test]
async fn test_supports_range_formatting_ignores_unrelated_language_servers(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    // Configure formatting to use the buffer's current language server.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.all_languages.defaults.formatter = Some(FormatterList::Single(
                    Formatter::LanguageServer(settings::LanguageServerFormatterSpecifier::Current),
                ));
            });
        });
    });

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
            "b.rs": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    language_registry.add(rust_lang());

    // TypeScript server: range formatting supported.
    let mut typescript_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            name: "typescript-fake-language-server",
            capabilities: lsp::ServerCapabilities {
                document_range_formatting_provider: Some(lsp::OneOf::Left(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // Rust server: document formatting only; range formatting explicitly off.
    let mut rust_language_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-fake-language-server",
            capabilities: lsp::ServerCapabilities {
                document_formatting_provider: Some(lsp::OneOf::Left(true)),
                document_range_formatting_provider: Some(lsp::OneOf::Left(false)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (typescript_buffer, _typescript_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    let (rust_buffer, _rust_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let _typescript_language_server = typescript_language_servers.next().await.unwrap();
    let _rust_language_server = rust_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    assert!(project.read_with(cx, |project, cx| {
        project.supports_range_formatting(&typescript_buffer, cx)
    }));
    assert!(!project.read_with(cx, |project, cx| {
        project.supports_range_formatting(&rust_buffer, cx)
    }));
}
5080
/// Verifies the command-based code-action flow: an action with no edits is
/// resolved to a command, the command is executed, the server responds by
/// sending a `workspace/applyEdit` back to the editor, and those edits end
/// up in the returned project transaction (and are undoable).
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Server advertises code-action resolve support and one executable command.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated applyEdit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
5224
5225#[gpui::test]
5226async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
5227 init_test(cx);
5228 let fs = FakeFs::new(cx.background_executor.clone());
5229 let expected_contents = "content";
5230 fs.as_fake()
5231 .insert_tree(
5232 "/root",
5233 json!({
5234 "test.txt": expected_contents
5235 }),
5236 )
5237 .await;
5238
5239 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
5240
5241 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
5242 let worktree = project.worktrees(cx).next().unwrap();
5243 let entry_id = worktree
5244 .read(cx)
5245 .entry_for_path(rel_path("test.txt"))
5246 .unwrap()
5247 .id;
5248 (worktree, entry_id)
5249 });
5250 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5251 let _result = project
5252 .update(cx, |project, cx| {
5253 project.rename_entry(
5254 entry_id,
5255 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
5256 cx,
5257 )
5258 })
5259 .await
5260 .unwrap();
5261 worktree.read_with(cx, |worktree, _| {
5262 assert!(
5263 worktree.entry_for_path(rel_path("test.txt")).is_none(),
5264 "Old file should have been removed"
5265 );
5266 assert!(
5267 worktree
5268 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5269 .is_some(),
5270 "Whole directory hierarchy and the new file should have been created"
5271 );
5272 });
5273 assert_eq!(
5274 worktree
5275 .update(cx, |worktree, cx| {
5276 worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
5277 })
5278 .await
5279 .unwrap()
5280 .text,
5281 expected_contents,
5282 "Moved file's contents should be preserved"
5283 );
5284
5285 let entry_id = worktree.read_with(cx, |worktree, _| {
5286 worktree
5287 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5288 .unwrap()
5289 .id
5290 });
5291
5292 let _result = project
5293 .update(cx, |project, cx| {
5294 project.rename_entry(
5295 entry_id,
5296 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
5297 cx,
5298 )
5299 })
5300 .await
5301 .unwrap();
5302 worktree.read_with(cx, |worktree, _| {
5303 assert!(
5304 worktree.entry_for_path(rel_path("test.txt")).is_none(),
5305 "First file should not reappear"
5306 );
5307 assert!(
5308 worktree
5309 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5310 .is_none(),
5311 "Old file should have been removed"
5312 );
5313 assert!(
5314 worktree
5315 .entry_for_path(rel_path("dir1/dir2/test.txt"))
5316 .is_some(),
5317 "No error should have occurred after moving into existing directory"
5318 );
5319 });
5320 assert_eq!(
5321 worktree
5322 .update(cx, |worktree, cx| {
5323 worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
5324 })
5325 .await
5326 .unwrap()
5327 .text,
5328 expected_contents,
5329 "Moved file's contents should be preserved"
5330 );
5331}
5332
5333#[gpui::test(iterations = 10)]
5334async fn test_save_file(cx: &mut gpui::TestAppContext) {
5335 init_test(cx);
5336
5337 let fs = FakeFs::new(cx.executor());
5338 fs.insert_tree(
5339 path!("/dir"),
5340 json!({
5341 "file1": "the old contents",
5342 }),
5343 )
5344 .await;
5345
5346 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5347 let buffer = project
5348 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5349 .await
5350 .unwrap();
5351 buffer.update(cx, |buffer, cx| {
5352 assert_eq!(buffer.text(), "the old contents");
5353 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5354 });
5355
5356 project
5357 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5358 .await
5359 .unwrap();
5360
5361 let new_text = fs
5362 .load(Path::new(path!("/dir/file1")))
5363 .await
5364 .unwrap()
5365 .replace("\r\n", "\n");
5366 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5367}
5368
/// Regression test for issue #24349: saving an untitled buffer under a name
/// with a recognized extension (`file.rs`) must spawn the corresponding
/// language server and notify it about the newly saved file.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; with no file (and hence no language),
    // no language server should attach yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as a Rust file inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer is associated with the new server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
5448
5449#[gpui::test(iterations = 30)]
5450async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
5451 init_test(cx);
5452
5453 let fs = FakeFs::new(cx.executor());
5454 fs.insert_tree(
5455 path!("/dir"),
5456 json!({
5457 "file1": "the original contents",
5458 }),
5459 )
5460 .await;
5461
5462 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5463 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5464 let buffer = project
5465 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5466 .await
5467 .unwrap();
5468
5469 // Change the buffer's file on disk, and then wait for the file change
5470 // to be detected by the worktree, so that the buffer starts reloading.
5471 fs.save(
5472 path!("/dir/file1").as_ref(),
5473 &"the first contents".into(),
5474 Default::default(),
5475 )
5476 .await
5477 .unwrap();
5478 worktree.next_event(cx).await;
5479
5480 // Change the buffer's file again. Depending on the random seed, the
5481 // previous file change may still be in progress.
5482 fs.save(
5483 path!("/dir/file1").as_ref(),
5484 &"the second contents".into(),
5485 Default::default(),
5486 )
5487 .await
5488 .unwrap();
5489 worktree.next_event(cx).await;
5490
5491 cx.executor().run_until_parked();
5492 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5493 buffer.read_with(cx, |buffer, _| {
5494 assert_eq!(buffer.text(), on_disk_text);
5495 assert!(!buffer.is_dirty(), "buffer should not be dirty");
5496 assert!(!buffer.has_conflict(), "buffer should not be dirty");
5497 });
5498}
5499
5500#[gpui::test(iterations = 30)]
5501async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
5502 init_test(cx);
5503
5504 let fs = FakeFs::new(cx.executor());
5505 fs.insert_tree(
5506 path!("/dir"),
5507 json!({
5508 "file1": "the original contents",
5509 }),
5510 )
5511 .await;
5512
5513 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5514 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5515 let buffer = project
5516 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5517 .await
5518 .unwrap();
5519
5520 // Change the buffer's file on disk, and then wait for the file change
5521 // to be detected by the worktree, so that the buffer starts reloading.
5522 fs.save(
5523 path!("/dir/file1").as_ref(),
5524 &"the first contents".into(),
5525 Default::default(),
5526 )
5527 .await
5528 .unwrap();
5529 worktree.next_event(cx).await;
5530
5531 cx.executor()
5532 .spawn(cx.executor().simulate_random_delay())
5533 .await;
5534
5535 // Perform a noop edit, causing the buffer's version to increase.
5536 buffer.update(cx, |buffer, cx| {
5537 buffer.edit([(0..0, " ")], None, cx);
5538 buffer.undo(cx);
5539 });
5540
5541 cx.executor().run_until_parked();
5542 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5543 buffer.read_with(cx, |buffer, _| {
5544 let buffer_text = buffer.text();
5545 if buffer_text == on_disk_text {
5546 assert!(
5547 !buffer.is_dirty() && !buffer.has_conflict(),
5548 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5549 );
5550 }
5551 // If the file change occurred while the buffer was processing the first
5552 // change, the buffer will be in a conflicting state.
5553 else {
5554 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5555 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5556 }
5557 });
5558}
5559
5560#[gpui::test]
5561async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5562 init_test(cx);
5563
5564 let fs = FakeFs::new(cx.executor());
5565 fs.insert_tree(
5566 path!("/dir"),
5567 json!({
5568 "file1": "the old contents",
5569 }),
5570 )
5571 .await;
5572
5573 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5574 let buffer = project
5575 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5576 .await
5577 .unwrap();
5578 buffer.update(cx, |buffer, cx| {
5579 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5580 });
5581
5582 project
5583 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5584 .await
5585 .unwrap();
5586
5587 let new_text = fs
5588 .load(Path::new(path!("/dir/file1")))
5589 .await
5590 .unwrap()
5591 .replace("\r\n", "\n");
5592 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5593}
5594
// Verifies that saving an untitled buffer to a path writes its contents to
// disk, clears the dirty state, re-detects the language from the new file
// extension, and that subsequently opening that path dedupes to the same
// buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts out as plain text and becomes dirty once
    // edited.
    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text");
    });
    // Save the buffer under `file1.rs` within the project's only worktree.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    // After saving, the buffer is clean, associated with the new path, and
    // its language is re-detected from the `.rs` extension.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust");
    });

    // Opening the saved path yields the same buffer entity, not a copy.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
5648
5649#[gpui::test]
5650async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
5651 init_test(cx);
5652
5653 let fs = FakeFs::new(cx.executor());
5654 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5655
5656 fs.insert_tree(
5657 path!("/dir"),
5658 json!({
5659 "data_a.txt": "data about a"
5660 }),
5661 )
5662 .await;
5663
5664 let buffer = project
5665 .update(cx, |project, cx| {
5666 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5667 })
5668 .await
5669 .unwrap();
5670
5671 buffer.update(cx, |buffer, cx| {
5672 buffer.edit([(11..12, "b")], None, cx);
5673 });
5674
5675 // Save buffer's contents as a new file and confirm that the buffer's now
5676 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
5677 // file associated with the buffer has now been updated to `data_b.txt`
5678 project
5679 .update(cx, |project, cx| {
5680 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5681 let new_path = ProjectPath {
5682 worktree_id,
5683 path: rel_path("data_b.txt").into(),
5684 };
5685
5686 project.save_buffer_as(buffer.clone(), new_path, cx)
5687 })
5688 .await
5689 .unwrap();
5690
5691 buffer.update(cx, |buffer, cx| {
5692 assert_eq!(
5693 buffer.file().unwrap().full_path(cx),
5694 Path::new("dir/data_b.txt")
5695 )
5696 });
5697
5698 // Open the original `data_a.txt` file, confirming that its contents are
5699 // unchanged and the resulting buffer's associated file is `data_a.txt`.
5700 let original_buffer = project
5701 .update(cx, |project, cx| {
5702 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5703 })
5704 .await
5705 .unwrap();
5706
5707 original_buffer.update(cx, |buffer, cx| {
5708 assert_eq!(buffer.text(), "data about a");
5709 assert_eq!(
5710 buffer.file().unwrap().full_path(cx),
5711 Path::new("dir/data_a.txt")
5712 )
5713 });
5714}
5715
// Exercises worktree rescanning after on-disk renames/deletions and checks
// that (1) entry ids and open buffers track their files across renames,
// (2) a deleted file's buffer reports a deleted disk state, and (3) a
// remote replica of the worktree converges after replaying the recorded
// update stream.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // This test uses the real filesystem, so allow the executor to block.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the entry id for a worktree-relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update emitted by the local worktree so it can be
    // replayed onto the remote worktree later in the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    // All buffers start out clean.
    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The worktree reflects the new on-disk layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids are preserved across renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the buffer for the
    // deleted file keeps its old path but reports a deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5883
5884#[cfg(target_os = "linux")]
5885#[gpui::test(retries = 5)]
5886async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
5887 init_test(cx);
5888 cx.executor().allow_parking();
5889
5890 let dir = TempTree::new(json!({}));
5891 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
5892 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5893
5894 tree.flush_fs_events(cx).await;
5895
5896 let repro_dir = dir.path().join("repro");
5897 std::fs::create_dir(&repro_dir).unwrap();
5898 tree.flush_fs_events(cx).await;
5899
5900 cx.update(|cx| {
5901 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5902 });
5903
5904 std::fs::remove_dir_all(&repro_dir).unwrap();
5905 tree.flush_fs_events(cx).await;
5906
5907 cx.update(|cx| {
5908 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
5909 });
5910
5911 std::fs::create_dir(&repro_dir).unwrap();
5912 tree.flush_fs_events(cx).await;
5913
5914 cx.update(|cx| {
5915 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5916 });
5917
5918 std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
5919 tree.flush_fs_events(cx).await;
5920
5921 cx.update(|cx| {
5922 assert!(
5923 tree.read(cx)
5924 .entry_for_path(rel_path("repro/repro-marker"))
5925 .is_some()
5926 );
5927 });
5928}
5929
// Renaming a directory should preserve the worktree entry ids of the
// directory and its children, and should not dirty buffers open inside it.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the entry id for a worktree-relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory `a` to `b`.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids survive the rename, and the open buffer remains clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
5983
5984#[gpui::test]
5985async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5986 init_test(cx);
5987
5988 let fs = FakeFs::new(cx.executor());
5989 fs.insert_tree(
5990 "/dir",
5991 json!({
5992 "a.txt": "a-contents",
5993 "b.txt": "b-contents",
5994 }),
5995 )
5996 .await;
5997
5998 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5999
6000 // Spawn multiple tasks to open paths, repeating some paths.
6001 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
6002 (
6003 p.open_local_buffer("/dir/a.txt", cx),
6004 p.open_local_buffer("/dir/b.txt", cx),
6005 p.open_local_buffer("/dir/a.txt", cx),
6006 )
6007 });
6008
6009 let buffer_a_1 = buffer_a_1.await.unwrap();
6010 let buffer_a_2 = buffer_a_2.await.unwrap();
6011 let buffer_b = buffer_b.await.unwrap();
6012 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
6013 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
6014
6015 // There is only one buffer per path.
6016 let buffer_a_id = buffer_a_1.entity_id();
6017 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
6018
6019 // Open the same path again while it is still open.
6020 drop(buffer_a_1);
6021 let buffer_a_3 = project
6022 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
6023 .await
6024 .unwrap();
6025
6026 // There's still only one buffer per path.
6027 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
6028}
6029
6030#[gpui::test]
6031async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
6032 init_test(cx);
6033
6034 let fs = FakeFs::new(cx.executor());
6035 fs.insert_tree(
6036 path!("/dir"),
6037 json!({
6038 "file1": "abc",
6039 "file2": "def",
6040 "file3": "ghi",
6041 }),
6042 )
6043 .await;
6044
6045 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6046
6047 let buffer1 = project
6048 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
6049 .await
6050 .unwrap();
6051 let events = Arc::new(Mutex::new(Vec::new()));
6052
6053 // initially, the buffer isn't dirty.
6054 buffer1.update(cx, |buffer, cx| {
6055 cx.subscribe(&buffer1, {
6056 let events = events.clone();
6057 move |_, _, event, _| match event {
6058 BufferEvent::Operation { .. } => {}
6059 _ => events.lock().push(event.clone()),
6060 }
6061 })
6062 .detach();
6063
6064 assert!(!buffer.is_dirty());
6065 assert!(events.lock().is_empty());
6066
6067 buffer.edit([(1..2, "")], None, cx);
6068 });
6069
6070 // after the first edit, the buffer is dirty, and emits a dirtied event.
6071 buffer1.update(cx, |buffer, cx| {
6072 assert!(buffer.text() == "ac");
6073 assert!(buffer.is_dirty());
6074 assert_eq!(
6075 *events.lock(),
6076 &[
6077 language::BufferEvent::Edited { is_local: true },
6078 language::BufferEvent::DirtyChanged
6079 ]
6080 );
6081 events.lock().clear();
6082 buffer.did_save(
6083 buffer.version(),
6084 buffer.file().unwrap().disk_state().mtime(),
6085 cx,
6086 );
6087 });
6088
6089 // after saving, the buffer is not dirty, and emits a saved event.
6090 buffer1.update(cx, |buffer, cx| {
6091 assert!(!buffer.is_dirty());
6092 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
6093 events.lock().clear();
6094
6095 buffer.edit([(1..1, "B")], None, cx);
6096 buffer.edit([(2..2, "D")], None, cx);
6097 });
6098
6099 // after editing again, the buffer is dirty, and emits another dirty event.
6100 buffer1.update(cx, |buffer, cx| {
6101 assert!(buffer.text() == "aBDc");
6102 assert!(buffer.is_dirty());
6103 assert_eq!(
6104 *events.lock(),
6105 &[
6106 language::BufferEvent::Edited { is_local: true },
6107 language::BufferEvent::DirtyChanged,
6108 language::BufferEvent::Edited { is_local: true },
6109 ],
6110 );
6111 events.lock().clear();
6112
6113 // After restoring the buffer to its previously-saved state,
6114 // the buffer is not considered dirty anymore.
6115 buffer.edit([(1..3, "")], None, cx);
6116 assert!(buffer.text() == "ac");
6117 assert!(!buffer.is_dirty());
6118 });
6119
6120 assert_eq!(
6121 *events.lock(),
6122 &[
6123 language::BufferEvent::Edited { is_local: true },
6124 language::BufferEvent::DirtyChanged
6125 ]
6126 );
6127
6128 // When a file is deleted, it is not considered dirty.
6129 let events = Arc::new(Mutex::new(Vec::new()));
6130 let buffer2 = project
6131 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
6132 .await
6133 .unwrap();
6134 buffer2.update(cx, |_, cx| {
6135 cx.subscribe(&buffer2, {
6136 let events = events.clone();
6137 move |_, _, event, _| match event {
6138 BufferEvent::Operation { .. } => {}
6139 _ => events.lock().push(event.clone()),
6140 }
6141 })
6142 .detach();
6143 });
6144
6145 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
6146 .await
6147 .unwrap();
6148 cx.executor().run_until_parked();
6149 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
6150 assert_eq!(
6151 mem::take(&mut *events.lock()),
6152 &[language::BufferEvent::FileHandleChanged]
6153 );
6154
6155 // Buffer becomes dirty when edited.
6156 buffer2.update(cx, |buffer, cx| {
6157 buffer.edit([(2..3, "")], None, cx);
6158 assert_eq!(buffer.is_dirty(), true);
6159 });
6160 assert_eq!(
6161 mem::take(&mut *events.lock()),
6162 &[
6163 language::BufferEvent::Edited { is_local: true },
6164 language::BufferEvent::DirtyChanged
6165 ]
6166 );
6167
6168 // Buffer becomes clean again when all of its content is removed, because
6169 // the file was deleted.
6170 buffer2.update(cx, |buffer, cx| {
6171 buffer.edit([(0..2, "")], None, cx);
6172 assert_eq!(buffer.is_empty(), true);
6173 assert_eq!(buffer.is_dirty(), false);
6174 });
6175 assert_eq!(
6176 *events.lock(),
6177 &[
6178 language::BufferEvent::Edited { is_local: true },
6179 language::BufferEvent::DirtyChanged
6180 ]
6181 );
6182
6183 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6184 let events = Arc::new(Mutex::new(Vec::new()));
6185 let buffer3 = project
6186 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
6187 .await
6188 .unwrap();
6189 buffer3.update(cx, |_, cx| {
6190 cx.subscribe(&buffer3, {
6191 let events = events.clone();
6192 move |_, _, event, _| match event {
6193 BufferEvent::Operation { .. } => {}
6194 _ => events.lock().push(event.clone()),
6195 }
6196 })
6197 .detach();
6198 });
6199
6200 buffer3.update(cx, |buffer, cx| {
6201 buffer.edit([(0..0, "x")], None, cx);
6202 });
6203 events.lock().clear();
6204 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
6205 .await
6206 .unwrap();
6207 cx.executor().run_until_parked();
6208 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
6209 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
6210}
6211
// When a dirty buffer becomes clean again via undo (without saving) while
// the file changed on disk in the meantime, the buffer should reload the
// on-disk contents instead of keeping the stale text.
#[gpui::test]
async fn test_dirty_buffer_reloads_after_undo(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file.txt": "version 1",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.text(), "version 1");
        assert!(!buffer.is_dirty());
    });

    // User makes an edit, making the buffer dirty.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "user edit: ")], None, cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.is_dirty());
        assert_eq!(buffer.text(), "user edit: version 1");
    });

    // External tool writes new content while buffer is dirty.
    // file_updated() updates the File but suppresses ReloadNeeded.
    fs.save(
        path!("/dir/file.txt").as_ref(),
        &"version 2 from external tool".into(),
        Default::default(),
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    // The dirty buffer keeps the user's text and reports a conflict.
    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.has_conflict());
        assert_eq!(buffer.text(), "user edit: version 1");
    });

    // User undoes their edit. Buffer becomes clean, but disk has different
    // content. did_edit() detects the dirty->clean transition and checks if
    // disk changed while dirty. Since mtime differs from saved_mtime, it
    // emits ReloadNeeded.
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });
    cx.executor().run_until_parked();

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.text(),
            "version 2 from external tool",
            "buffer should reload from disk after undo makes it clean"
        );
        assert!(!buffer.is_dirty());
    });
}
6280
6281#[gpui::test]
6282async fn test_buffer_file_change_to_binary_fails(cx: &mut gpui::TestAppContext) {
6283 init_test(cx);
6284
6285 let fs = FakeFs::new(cx.executor());
6286 fs.insert_tree(
6287 path!("/dir"),
6288 json!({
6289 "file.txt": "",
6290 }),
6291 )
6292 .await;
6293
6294 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6295 let buffer = project
6296 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx))
6297 .await
6298 .unwrap();
6299
6300 fs.write(
6301 path!("/dir/file.txt").as_ref(),
6302 b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01",
6303 )
6304 .await
6305 .unwrap();
6306 cx.executor().run_until_parked();
6307
6308 // Test that existing buffer is left untouched
6309 buffer.read_with(cx, |buffer, _| {
6310 assert_eq!(buffer.text(), "");
6311 });
6312}
6313
// A clean buffer reloads when its file changes on disk, preserving anchor
// positions via a diff-based edit; a dirty buffer does not reload and is
// marked as conflicted instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The `ˇ` markers record offsets at which anchors will be created.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // The buffer starts out clean and conflict-free.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk while the buffer is clean, modifying some of
    // the words around the anchors.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diffed edits to the marked offsets
        // in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
6396
6397#[gpui::test]
6398async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
6399 init_test(cx);
6400
6401 let fs = FakeFs::new(cx.executor());
6402 fs.insert_tree(
6403 path!("/dir"),
6404 json!({
6405 "file1": "a\nb\nc\n",
6406 "file2": "one\r\ntwo\r\nthree\r\n",
6407 }),
6408 )
6409 .await;
6410
6411 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6412 let buffer1 = project
6413 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
6414 .await
6415 .unwrap();
6416 let buffer2 = project
6417 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
6418 .await
6419 .unwrap();
6420
6421 buffer1.update(cx, |buffer, _| {
6422 assert_eq!(buffer.text(), "a\nb\nc\n");
6423 assert_eq!(buffer.line_ending(), LineEnding::Unix);
6424 });
6425 buffer2.update(cx, |buffer, _| {
6426 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
6427 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6428 });
6429
6430 // Change a file's line endings on disk from unix to windows. The buffer's
6431 // state updates correctly.
6432 fs.save(
6433 path!("/dir/file1").as_ref(),
6434 &"aaa\nb\nc\n".into(),
6435 LineEnding::Windows,
6436 )
6437 .await
6438 .unwrap();
6439 cx.executor().run_until_parked();
6440 buffer1.update(cx, |buffer, _| {
6441 assert_eq!(buffer.text(), "aaa\nb\nc\n");
6442 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6443 });
6444
6445 // Save a file with windows line endings. The file is written correctly.
6446 buffer2.update(cx, |buffer, cx| {
6447 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
6448 });
6449 project
6450 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
6451 .await
6452 .unwrap();
6453 assert_eq!(
6454 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
6455 "one\r\ntwo\r\nthree\r\nfour\r\n",
6456 );
6457}
6458
// Checks how each `line_ending` user-setting value affects the line endings
// applied on format, for files that start with CRLF, LF, and no trailing
// newline.
#[gpui::test]
async fn test_line_ending_user_settings_on_format(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // (case name, `line_ending` setting, expected per-file endings after
    // formatting).
    let cases = [
        (
            "default",
            None,
            [
                ("crlf_file.rs", LineEnding::Windows),
                ("lf_file.rs", LineEnding::Unix),
                ("no_newline.rs", LineEnding::default()),
            ],
        ),
        (
            "detect",
            Some(LineEndingSetting::Detect),
            [
                ("crlf_file.rs", LineEnding::Windows),
                ("lf_file.rs", LineEnding::Unix),
                ("no_newline.rs", LineEnding::default()),
            ],
        ),
        (
            "prefer_lf",
            Some(LineEndingSetting::PreferLf),
            [
                ("crlf_file.rs", LineEnding::Windows),
                ("lf_file.rs", LineEnding::Unix),
                ("no_newline.rs", LineEnding::Unix),
            ],
        ),
        (
            "prefer_crlf",
            Some(LineEndingSetting::PreferCrlf),
            [
                ("crlf_file.rs", LineEnding::Windows),
                ("lf_file.rs", LineEnding::Unix),
                ("no_newline.rs", LineEnding::Windows),
            ],
        ),
        (
            "enforce_lf",
            Some(LineEndingSetting::EnforceLf),
            [
                ("crlf_file.rs", LineEnding::Unix),
                ("lf_file.rs", LineEnding::Unix),
                ("no_newline.rs", LineEnding::Unix),
            ],
        ),
        (
            "enforce_crlf",
            Some(LineEndingSetting::EnforceCrlf),
            [
                ("crlf_file.rs", LineEnding::Windows),
                ("lf_file.rs", LineEnding::Windows),
                ("no_newline.rs", LineEnding::Windows),
            ],
        ),
    ];

    for (case_name, line_ending_setting, expected_line_endings) in cases {
        // A fresh project per case, so settings don't leak between cases.
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            path!("/dir"),
            json!({
                "crlf_file.rs": "one\r\ntwo\r\nthree\r\n",
                "lf_file.rs": "one\ntwo\nthree\n",
                "no_newline.rs": "single line",
            }),
        )
        .await;

        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        language_registry.add(rust_lang());
        let worktree_id = project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        });

        // Apply this case's `line_ending` value as a user setting.
        cx.update(|cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.project.all_languages.defaults.line_ending = line_ending_setting;
                });
            });
        });
        cx.executor().run_until_parked();

        assert_line_endings_after_format(
            cx,
            &project,
            worktree_id,
            case_name,
            &expected_line_endings,
        )
        .await;
    }
}
6558
// Checks that an `.editorconfig` `end_of_line` rule drives the line endings
// applied on format, and that the converted endings are written to disk on
// save.
#[gpui::test]
async fn test_line_ending_editorconfig_on_format_and_save(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // (case name, editorconfig `end_of_line` value, which file to open, that
    // file's initial detected line ending, expected per-file endings after
    // format, expected on-disk contents of the opened file after save).
    let cases = [
        (
            "editorconfig lf",
            "lf",
            "crlf_file.rs",
            LineEnding::Windows,
            [
                ("crlf_file.rs", LineEnding::Unix),
                ("lf_file.rs", LineEnding::Unix),
                ("no_newline.rs", LineEnding::Unix),
            ],
            "one\ntwo\nthree\n",
        ),
        (
            "editorconfig crlf",
            "crlf",
            "lf_file.rs",
            LineEnding::Unix,
            [
                ("crlf_file.rs", LineEnding::Windows),
                ("lf_file.rs", LineEnding::Windows),
                ("no_newline.rs", LineEnding::Windows),
            ],
            "one\r\ntwo\r\nthree\r\n",
        ),
    ];

    for (
        case_name,
        editorconfig_end_of_line,
        buffer_path,
        initial_line_ending,
        expected_line_endings,
        expected_saved_contents,
    ) in cases
    {
        let file_system = FakeFs::new(cx.executor());
        file_system
            .insert_tree(
                path!("/dir"),
                json!({
                    ".editorconfig": format!("root = true\n[*.rs]\nend_of_line = {editorconfig_end_of_line}\n"),
                    "crlf_file.rs": "one\r\ntwo\r\nthree\r\n",
                    "lf_file.rs": "one\ntwo\nthree\n",
                    "no_newline.rs": "single line",
                }),
            )
            .await;

        let project = Project::test(file_system.clone(), [path!("/dir").as_ref()], cx).await;
        let language_registry = project.read_with(cx, |project, _| project.languages().clone());
        language_registry.add(rust_lang());
        cx.executor().run_until_parked();
        let worktree_id = project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        });

        // Before formatting, the opened buffer keeps its detected ending.
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, rel_path(buffer_path)), cx)
            })
            .await
            .unwrap();
        buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.line_ending(), initial_line_ending);
        });

        assert_line_endings_after_format(
            cx,
            &project,
            worktree_id,
            case_name,
            &expected_line_endings,
        )
        .await;

        // Saving writes the converted line endings to disk.
        project
            .update(cx, |project, cx| project.save_buffer(buffer, cx))
            .await
            .unwrap();
        let saved_path = PathBuf::from(path!("/dir")).join(buffer_path);
        assert_eq!(
            file_system.load(&saved_path).await.unwrap(),
            expected_saved_contents,
        );
    }
}
6650
// Checks that the `line_ending` user setting determines the line ending of
// newly created buffers (untitled, local, and opened-but-missing files).
#[gpui::test]
async fn test_line_ending_initialization_for_new_buffers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // (setting value, expected line ending for new buffers).
    let cases = [
        (Some(LineEndingSetting::Detect), LineEnding::default()),
        (Some(LineEndingSetting::PreferLf), LineEnding::Unix),
        (Some(LineEndingSetting::PreferCrlf), LineEnding::Windows),
        (Some(LineEndingSetting::EnforceLf), LineEnding::Unix),
        (Some(LineEndingSetting::EnforceCrlf), LineEnding::Windows),
    ];

    for (line_ending_setting, expected_line_ending) in cases {
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(path!("/dir"), json!({})).await;

        let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
        cx.update(|cx| {
            SettingsStore::update_global(cx, |store, cx| {
                store.update_user_settings(cx, |settings| {
                    settings.project.all_languages.defaults.line_ending = line_ending_setting;
                });
            });
        });
        cx.executor().run_until_parked();

        // An untitled buffer created through the project.
        let created_buffer = project
            .update(cx, |project, cx| project.create_buffer(None, false, cx))
            .unwrap()
            .await;
        created_buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.line_ending(), expected_line_ending);
        });

        // A local buffer created with initial contents.
        let local_buffer = project.update(cx, |project, cx| {
            project.create_local_buffer("single line", None, false, cx)
        });
        local_buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.line_ending(), expected_line_ending);
        });

        // A buffer opened for a path that doesn't exist on disk yet.
        let opened_missing_buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer(path!("/dir/new_file.rs"), cx)
            })
            .await
            .unwrap();
        opened_missing_buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.line_ending(), expected_line_ending);
        });
    }
}
6703
6704async fn assert_line_endings_after_format(
6705 cx: &mut gpui::TestAppContext,
6706 project: &Entity<Project>,
6707 worktree_id: WorktreeId,
6708 case_name: &str,
6709 expected_line_endings: &[(&str, LineEnding)],
6710) {
6711 for (path, expected_line_ending) in expected_line_endings {
6712 let buffer = project
6713 .update(cx, |project, cx| {
6714 project.open_buffer((worktree_id, rel_path(path)), cx)
6715 })
6716 .await
6717 .unwrap();
6718 let mut buffers = HashSet::default();
6719 buffers.insert(buffer.clone());
6720 project
6721 .update(cx, |project, cx| {
6722 project.format(
6723 buffers,
6724 project::lsp_store::LspFormatTarget::Buffers,
6725 false,
6726 project::lsp_store::FormatTrigger::Save,
6727 cx,
6728 )
6729 })
6730 .await
6731 .unwrap();
6732 buffer.update(cx, |buffer, _| {
6733 assert_eq!(
6734 buffer.line_ending(),
6735 *expected_line_ending,
6736 "unexpected line ending for {path} in {case_name}"
6737 );
6738 });
6739 }
6740}
6741
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Pushed LSP diagnostics that reference each other via
    // `related_information` should be grouped: hint diagnostics share the
    // `group_id` of their primary diagnostic, and `diagnostic_group` returns
    // a group's entries in position order.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // "error 1": primary warning; its related information points at
            // the matching hint diagnostic below.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint belonging to "error 1"; its related information links back
            // to the original diagnostic.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // "error 2": primary error on line 2 with two related hints that
            // both point at line 1.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint belonging to "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint belonging to "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Deliver the diagnostics as if a language server had pushed them.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by position: each hint carries the `group_id`
    // of its primary and is marked `is_primary: false`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 is "error 2": both hints precede the primary error, in
    // position order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 is "error 1": the primary warning plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
7001
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Renaming a worktree entry must drive the LSP file-operation protocol:
    // `workspace/willRenameFiles` is sent first (and the workspace edit it
    // returns is resolved), then `workspace/didRenameFiles` after the rename.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server registers interest in `.rs` files and in any folder.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a Rust buffer so a (fake) language server is started for it.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename one.rs -> three.rs. The willRename handler is
    // installed below, while this request is still pending.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The workspace edit the fake server answers willRenameFiles with.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    // Service the pending willRenameFiles request, checking the old/new URIs
    // and recording the edit we returned for comparison below.
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must be notified via
    // didRenameFiles with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
7138
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises symbol renaming end to end against a fake language server:
    // `prepare_rename` resolves the renameable range, then `perform_rename`
    // applies the server's multi-file workspace edit.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // prepare_rename at offset 7 (inside `ONE`); the fake server reports the
    // renameable range [6, 9), i.e. the identifier itself.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename: the server's workspace edit renames ONE -> THREE at its
    // definition in one.rs and at both references in two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction should contain exactly the two edited buffers.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    // The remaining entry is the two.rs buffer, with both references renamed.
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
7279
7280#[gpui::test]
7281async fn test_search(cx: &mut gpui::TestAppContext) {
7282 init_test(cx);
7283
7284 let fs = FakeFs::new(cx.executor());
7285 fs.insert_tree(
7286 path!("/dir"),
7287 json!({
7288 "one.rs": "const ONE: usize = 1;",
7289 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
7290 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
7291 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
7292 }),
7293 )
7294 .await;
7295 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7296 assert_eq!(
7297 search(
7298 &project,
7299 SearchQuery::text(
7300 "TWO",
7301 false,
7302 true,
7303 false,
7304 Default::default(),
7305 Default::default(),
7306 false,
7307 None
7308 )
7309 .unwrap(),
7310 cx
7311 )
7312 .await
7313 .unwrap(),
7314 HashMap::from_iter([
7315 (path!("dir/two.rs").to_string(), vec![6..9]),
7316 (path!("dir/three.rs").to_string(), vec![37..40])
7317 ])
7318 );
7319
7320 let buffer_4 = project
7321 .update(cx, |project, cx| {
7322 project.open_local_buffer(path!("/dir/four.rs"), cx)
7323 })
7324 .await
7325 .unwrap();
7326 buffer_4.update(cx, |buffer, cx| {
7327 let text = "two::TWO";
7328 buffer.edit([(20..28, text), (31..43, text)], None, cx);
7329 });
7330
7331 assert_eq!(
7332 search(
7333 &project,
7334 SearchQuery::text(
7335 "TWO",
7336 false,
7337 true,
7338 false,
7339 Default::default(),
7340 Default::default(),
7341 false,
7342 None,
7343 )
7344 .unwrap(),
7345 cx
7346 )
7347 .await
7348 .unwrap(),
7349 HashMap::from_iter([
7350 (path!("dir/two.rs").to_string(), vec![6..9]),
7351 (path!("dir/three.rs").to_string(), vec![37..40]),
7352 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
7353 ])
7354 );
7355}
7356
7357#[gpui::test]
7358async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
7359 init_test(cx);
7360
7361 let search_query = "file";
7362
7363 let fs = FakeFs::new(cx.executor());
7364 fs.insert_tree(
7365 path!("/dir"),
7366 json!({
7367 "one.rs": r#"// Rust file one"#,
7368 "one.ts": r#"// TypeScript file one"#,
7369 "two.rs": r#"// Rust file two"#,
7370 "two.ts": r#"// TypeScript file two"#,
7371 }),
7372 )
7373 .await;
7374 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7375
7376 assert!(
7377 search(
7378 &project,
7379 SearchQuery::text(
7380 search_query,
7381 false,
7382 true,
7383 false,
7384 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7385 Default::default(),
7386 false,
7387 None
7388 )
7389 .unwrap(),
7390 cx
7391 )
7392 .await
7393 .unwrap()
7394 .is_empty(),
7395 "If no inclusions match, no files should be returned"
7396 );
7397
7398 assert_eq!(
7399 search(
7400 &project,
7401 SearchQuery::text(
7402 search_query,
7403 false,
7404 true,
7405 false,
7406 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
7407 Default::default(),
7408 false,
7409 None
7410 )
7411 .unwrap(),
7412 cx
7413 )
7414 .await
7415 .unwrap(),
7416 HashMap::from_iter([
7417 (path!("dir/one.rs").to_string(), vec![8..12]),
7418 (path!("dir/two.rs").to_string(), vec![8..12]),
7419 ]),
7420 "Rust only search should give only Rust files"
7421 );
7422
7423 assert_eq!(
7424 search(
7425 &project,
7426 SearchQuery::text(
7427 search_query,
7428 false,
7429 true,
7430 false,
7431 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7432 .unwrap(),
7433 Default::default(),
7434 false,
7435 None,
7436 )
7437 .unwrap(),
7438 cx
7439 )
7440 .await
7441 .unwrap(),
7442 HashMap::from_iter([
7443 (path!("dir/one.ts").to_string(), vec![14..18]),
7444 (path!("dir/two.ts").to_string(), vec![14..18]),
7445 ]),
7446 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
7447 );
7448
7449 assert_eq!(
7450 search(
7451 &project,
7452 SearchQuery::text(
7453 search_query,
7454 false,
7455 true,
7456 false,
7457 PathMatcher::new(
7458 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7459 PathStyle::local()
7460 )
7461 .unwrap(),
7462 Default::default(),
7463 false,
7464 None,
7465 )
7466 .unwrap(),
7467 cx
7468 )
7469 .await
7470 .unwrap(),
7471 HashMap::from_iter([
7472 (path!("dir/two.ts").to_string(), vec![14..18]),
7473 (path!("dir/one.rs").to_string(), vec![8..12]),
7474 (path!("dir/one.ts").to_string(), vec![14..18]),
7475 (path!("dir/two.rs").to_string(), vec![8..12]),
7476 ]),
7477 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
7478 );
7479}
7480
7481#[gpui::test]
7482async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
7483 init_test(cx);
7484
7485 let search_query = "file";
7486
7487 let fs = FakeFs::new(cx.executor());
7488 fs.insert_tree(
7489 path!("/dir"),
7490 json!({
7491 "one.rs": r#"// Rust file one"#,
7492 "one.ts": r#"// TypeScript file one"#,
7493 "two.rs": r#"// Rust file two"#,
7494 "two.ts": r#"// TypeScript file two"#,
7495 }),
7496 )
7497 .await;
7498 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7499
7500 assert_eq!(
7501 search(
7502 &project,
7503 SearchQuery::text(
7504 search_query,
7505 false,
7506 true,
7507 false,
7508 Default::default(),
7509 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7510 false,
7511 None,
7512 )
7513 .unwrap(),
7514 cx
7515 )
7516 .await
7517 .unwrap(),
7518 HashMap::from_iter([
7519 (path!("dir/one.rs").to_string(), vec![8..12]),
7520 (path!("dir/one.ts").to_string(), vec![14..18]),
7521 (path!("dir/two.rs").to_string(), vec![8..12]),
7522 (path!("dir/two.ts").to_string(), vec![14..18]),
7523 ]),
7524 "If no exclusions match, all files should be returned"
7525 );
7526
7527 assert_eq!(
7528 search(
7529 &project,
7530 SearchQuery::text(
7531 search_query,
7532 false,
7533 true,
7534 false,
7535 Default::default(),
7536 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
7537 false,
7538 None,
7539 )
7540 .unwrap(),
7541 cx
7542 )
7543 .await
7544 .unwrap(),
7545 HashMap::from_iter([
7546 (path!("dir/one.ts").to_string(), vec![14..18]),
7547 (path!("dir/two.ts").to_string(), vec![14..18]),
7548 ]),
7549 "Rust exclusion search should give only TypeScript files"
7550 );
7551
7552 assert_eq!(
7553 search(
7554 &project,
7555 SearchQuery::text(
7556 search_query,
7557 false,
7558 true,
7559 false,
7560 Default::default(),
7561 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7562 .unwrap(),
7563 false,
7564 None,
7565 )
7566 .unwrap(),
7567 cx
7568 )
7569 .await
7570 .unwrap(),
7571 HashMap::from_iter([
7572 (path!("dir/one.rs").to_string(), vec![8..12]),
7573 (path!("dir/two.rs").to_string(), vec![8..12]),
7574 ]),
7575 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7576 );
7577
7578 assert!(
7579 search(
7580 &project,
7581 SearchQuery::text(
7582 search_query,
7583 false,
7584 true,
7585 false,
7586 Default::default(),
7587 PathMatcher::new(
7588 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7589 PathStyle::local(),
7590 )
7591 .unwrap(),
7592 false,
7593 None,
7594 )
7595 .unwrap(),
7596 cx
7597 )
7598 .await
7599 .unwrap()
7600 .is_empty(),
7601 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7602 );
7603}
7604
7605#[gpui::test]
7606async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
7607 init_test(cx);
7608
7609 let search_query = "file";
7610
7611 let fs = FakeFs::new(cx.executor());
7612 fs.insert_tree(
7613 path!("/dir"),
7614 json!({
7615 "one.rs": r#"// Rust file one"#,
7616 "one.ts": r#"// TypeScript file one"#,
7617 "two.rs": r#"// Rust file two"#,
7618 "two.ts": r#"// TypeScript file two"#,
7619 }),
7620 )
7621 .await;
7622
7623 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7624 let path_style = PathStyle::local();
7625 let _buffer = project.update(cx, |project, cx| {
7626 project.create_local_buffer("file", None, false, cx)
7627 });
7628
7629 assert_eq!(
7630 search(
7631 &project,
7632 SearchQuery::text(
7633 search_query,
7634 false,
7635 true,
7636 false,
7637 Default::default(),
7638 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
7639 false,
7640 None,
7641 )
7642 .unwrap(),
7643 cx
7644 )
7645 .await
7646 .unwrap(),
7647 HashMap::from_iter([
7648 (path!("dir/one.rs").to_string(), vec![8..12]),
7649 (path!("dir/one.ts").to_string(), vec![14..18]),
7650 (path!("dir/two.rs").to_string(), vec![8..12]),
7651 (path!("dir/two.ts").to_string(), vec![14..18]),
7652 ]),
7653 "If no exclusions match, all files should be returned"
7654 );
7655
7656 assert_eq!(
7657 search(
7658 &project,
7659 SearchQuery::text(
7660 search_query,
7661 false,
7662 true,
7663 false,
7664 Default::default(),
7665 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
7666 false,
7667 None,
7668 )
7669 .unwrap(),
7670 cx
7671 )
7672 .await
7673 .unwrap(),
7674 HashMap::from_iter([
7675 (path!("dir/one.ts").to_string(), vec![14..18]),
7676 (path!("dir/two.ts").to_string(), vec![14..18]),
7677 ]),
7678 "Rust exclusion search should give only TypeScript files"
7679 );
7680
7681 assert_eq!(
7682 search(
7683 &project,
7684 SearchQuery::text(
7685 search_query,
7686 false,
7687 true,
7688 false,
7689 Default::default(),
7690 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
7691 false,
7692 None,
7693 )
7694 .unwrap(),
7695 cx
7696 )
7697 .await
7698 .unwrap(),
7699 HashMap::from_iter([
7700 (path!("dir/one.rs").to_string(), vec![8..12]),
7701 (path!("dir/two.rs").to_string(), vec![8..12]),
7702 ]),
7703 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7704 );
7705
7706 assert!(
7707 search(
7708 &project,
7709 SearchQuery::text(
7710 search_query,
7711 false,
7712 true,
7713 false,
7714 Default::default(),
7715 PathMatcher::new(
7716 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7717 PathStyle::local(),
7718 )
7719 .unwrap(),
7720 false,
7721 None,
7722 )
7723 .unwrap(),
7724 cx
7725 )
7726 .await
7727 .unwrap()
7728 .is_empty(),
7729 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7730 );
7731}
7732
7733#[gpui::test]
7734async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
7735 init_test(cx);
7736
7737 let search_query = "file";
7738
7739 let fs = FakeFs::new(cx.executor());
7740 fs.insert_tree(
7741 path!("/dir"),
7742 json!({
7743 "one.rs": r#"// Rust file one"#,
7744 "one.ts": r#"// TypeScript file one"#,
7745 "two.rs": r#"// Rust file two"#,
7746 "two.ts": r#"// TypeScript file two"#,
7747 }),
7748 )
7749 .await;
7750 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7751 assert!(
7752 search(
7753 &project,
7754 SearchQuery::text(
7755 search_query,
7756 false,
7757 true,
7758 false,
7759 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7760 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7761 false,
7762 None,
7763 )
7764 .unwrap(),
7765 cx
7766 )
7767 .await
7768 .unwrap()
7769 .is_empty(),
7770 "If both no exclusions and inclusions match, exclusions should win and return nothing"
7771 );
7772
7773 assert!(
7774 search(
7775 &project,
7776 SearchQuery::text(
7777 search_query,
7778 false,
7779 true,
7780 false,
7781 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7782 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7783 false,
7784 None,
7785 )
7786 .unwrap(),
7787 cx
7788 )
7789 .await
7790 .unwrap()
7791 .is_empty(),
7792 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
7793 );
7794
7795 assert!(
7796 search(
7797 &project,
7798 SearchQuery::text(
7799 search_query,
7800 false,
7801 true,
7802 false,
7803 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7804 .unwrap(),
7805 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7806 .unwrap(),
7807 false,
7808 None,
7809 )
7810 .unwrap(),
7811 cx
7812 )
7813 .await
7814 .unwrap()
7815 .is_empty(),
7816 "Non-matching inclusions and exclusions should not change that."
7817 );
7818
7819 assert_eq!(
7820 search(
7821 &project,
7822 SearchQuery::text(
7823 search_query,
7824 false,
7825 true,
7826 false,
7827 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7828 .unwrap(),
7829 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
7830 .unwrap(),
7831 false,
7832 None,
7833 )
7834 .unwrap(),
7835 cx
7836 )
7837 .await
7838 .unwrap(),
7839 HashMap::from_iter([
7840 (path!("dir/one.ts").to_string(), vec![14..18]),
7841 (path!("dir/two.ts").to_string(), vec![14..18]),
7842 ]),
7843 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
7844 );
7845}
7846
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Inclusion globs may be prefixed with a worktree root name to scope the
    // search to that single worktree; an un-prefixed glob applies across all
    // worktrees.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    // One project spanning both worktrees.
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // Glob prefixed with "worktree-a": matches only in that worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same, scoped to "worktree-b".
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // Un-prefixed glob: matches in every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
7945
7946#[gpui::test]
7947async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
7948 init_test(cx);
7949
7950 let fs = FakeFs::new(cx.background_executor.clone());
7951 fs.insert_tree(
7952 path!("/dir"),
7953 json!({
7954 ".git": {},
7955 ".gitignore": "**/target\n/node_modules\n",
7956 "target": {
7957 "index.txt": "index_key:index_value"
7958 },
7959 "node_modules": {
7960 "eslint": {
7961 "index.ts": "const eslint_key = 'eslint value'",
7962 "package.json": r#"{ "some_key": "some value" }"#,
7963 },
7964 "prettier": {
7965 "index.ts": "const prettier_key = 'prettier value'",
7966 "package.json": r#"{ "other_key": "other value" }"#,
7967 },
7968 },
7969 "package.json": r#"{ "main_key": "main value" }"#,
7970 }),
7971 )
7972 .await;
7973 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7974
7975 let query = "key";
7976 assert_eq!(
7977 search(
7978 &project,
7979 SearchQuery::text(
7980 query,
7981 false,
7982 false,
7983 false,
7984 Default::default(),
7985 Default::default(),
7986 false,
7987 None,
7988 )
7989 .unwrap(),
7990 cx
7991 )
7992 .await
7993 .unwrap(),
7994 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
7995 "Only one non-ignored file should have the query"
7996 );
7997
7998 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7999 let path_style = PathStyle::local();
8000 assert_eq!(
8001 search(
8002 &project,
8003 SearchQuery::text(
8004 query,
8005 false,
8006 false,
8007 true,
8008 Default::default(),
8009 Default::default(),
8010 false,
8011 None,
8012 )
8013 .unwrap(),
8014 cx
8015 )
8016 .await
8017 .unwrap(),
8018 HashMap::from_iter([
8019 (path!("dir/package.json").to_string(), vec![8..11]),
8020 (path!("dir/target/index.txt").to_string(), vec![6..9]),
8021 (
8022 path!("dir/node_modules/prettier/package.json").to_string(),
8023 vec![9..12]
8024 ),
8025 (
8026 path!("dir/node_modules/prettier/index.ts").to_string(),
8027 vec![15..18]
8028 ),
8029 (
8030 path!("dir/node_modules/eslint/index.ts").to_string(),
8031 vec![13..16]
8032 ),
8033 (
8034 path!("dir/node_modules/eslint/package.json").to_string(),
8035 vec![8..11]
8036 ),
8037 ]),
8038 "Unrestricted search with ignored directories should find every file with the query"
8039 );
8040
8041 let files_to_include =
8042 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
8043 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
8044 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
8045 assert_eq!(
8046 search(
8047 &project,
8048 SearchQuery::text(
8049 query,
8050 false,
8051 false,
8052 true,
8053 files_to_include,
8054 files_to_exclude,
8055 false,
8056 None,
8057 )
8058 .unwrap(),
8059 cx
8060 )
8061 .await
8062 .unwrap(),
8063 HashMap::from_iter([(
8064 path!("dir/node_modules/prettier/package.json").to_string(),
8065 vec![9..12]
8066 )]),
8067 "With search including ignored prettier directory and excluding TS files, only one file should be found"
8068 );
8069}
8070
8071#[gpui::test]
8072async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
8073 init_test(cx);
8074
8075 let fs = FakeFs::new(cx.executor());
8076 fs.insert_tree(
8077 path!("/dir"),
8078 json!({
8079 "one.rs": "// ПРИВЕТ? привет!",
8080 "two.rs": "// ПРИВЕТ.",
8081 "three.rs": "// привет",
8082 }),
8083 )
8084 .await;
8085 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
8086 let unicode_case_sensitive_query = SearchQuery::text(
8087 "привет",
8088 false,
8089 true,
8090 false,
8091 Default::default(),
8092 Default::default(),
8093 false,
8094 None,
8095 );
8096 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
8097 assert_eq!(
8098 search(&project, unicode_case_sensitive_query.unwrap(), cx)
8099 .await
8100 .unwrap(),
8101 HashMap::from_iter([
8102 (path!("dir/one.rs").to_string(), vec![17..29]),
8103 (path!("dir/three.rs").to_string(), vec![3..15]),
8104 ])
8105 );
8106
8107 let unicode_case_insensitive_query = SearchQuery::text(
8108 "привет",
8109 false,
8110 false,
8111 false,
8112 Default::default(),
8113 Default::default(),
8114 false,
8115 None,
8116 );
8117 assert_matches!(
8118 unicode_case_insensitive_query,
8119 Ok(SearchQuery::Regex { .. })
8120 );
8121 assert_eq!(
8122 search(&project, unicode_case_insensitive_query.unwrap(), cx)
8123 .await
8124 .unwrap(),
8125 HashMap::from_iter([
8126 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
8127 (path!("dir/two.rs").to_string(), vec![3..15]),
8128 (path!("dir/three.rs").to_string(), vec![3..15]),
8129 ])
8130 );
8131
8132 assert_eq!(
8133 search(
8134 &project,
8135 SearchQuery::text(
8136 "привет.",
8137 false,
8138 false,
8139 false,
8140 Default::default(),
8141 Default::default(),
8142 false,
8143 None,
8144 )
8145 .unwrap(),
8146 cx
8147 )
8148 .await
8149 .unwrap(),
8150 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
8151 );
8152}
8153
8154#[gpui::test]
8155async fn test_create_entry(cx: &mut gpui::TestAppContext) {
8156 init_test(cx);
8157
8158 let fs = FakeFs::new(cx.executor());
8159 fs.insert_tree(
8160 "/one/two",
8161 json!({
8162 "three": {
8163 "a.txt": "",
8164 "four": {}
8165 },
8166 "c.rs": ""
8167 }),
8168 )
8169 .await;
8170
8171 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
8172 project
8173 .update(cx, |project, cx| {
8174 let id = project.worktrees(cx).next().unwrap().read(cx).id();
8175 project.create_entry((id, rel_path("b..")), true, cx)
8176 })
8177 .await
8178 .unwrap()
8179 .into_included()
8180 .unwrap();
8181
8182 assert_eq!(
8183 fs.paths(true),
8184 vec![
8185 PathBuf::from(path!("/")),
8186 PathBuf::from(path!("/one")),
8187 PathBuf::from(path!("/one/two")),
8188 PathBuf::from(path!("/one/two/c.rs")),
8189 PathBuf::from(path!("/one/two/three")),
8190 PathBuf::from(path!("/one/two/three/a.txt")),
8191 PathBuf::from(path!("/one/two/three/b..")),
8192 PathBuf::from(path!("/one/two/three/four")),
8193 ]
8194 );
8195}
8196
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Verifies that a single hover request fans out to every language server
    // that advertises hover capabilities, that servers without the capability
    // are never queried, and that empty (None) responses are dropped from the
    // aggregated result.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers registered for the same language: the first three
    // advertise hover support, the last one deliberately does not.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts the registered servers for its language;
    // parking the executor lets them finish initializing before handlers are
    // installed below.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each server as it comes up:
    // - TypeScriptServer / TailwindServer answer with a "<name> hover" block,
    // - ESLintServer answers with None (should be filtered out of results),
    // - NoHoverCapabilitiesServer panics if queried — it must never be, since
    //   it declared no hover capability.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Fire one project-level hover request...
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // ...and require that every server with hover capabilities received it
    // before collecting the aggregated responses.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
8351
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // Verifies that hover content blocks consisting only of empty or
    // whitespace strings are discarded, yielding an empty hover result.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // One fake server that advertises hover support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the server; park the executor so it finishes
    // initializing before the handler is installed.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server responds with three hover parts that are all effectively
    // empty: "", spaces, and newlines only.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    // Fire the hover request, then wait until the server has actually
    // received it before inspecting the aggregated result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
8425
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // Verifies that when the caller requests specific code-action kinds, the
    // returned actions are filtered to those kinds — even though the server
    // here returns two actions regardless of the request.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // One fake server that advertises code-action support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the server; park the executor so it finishes
    // initializing before the handler is installed.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server always returns both an organize-imports and a fix-all
    // action, ignoring any kind filter in the request.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Ask only for SOURCE_ORGANIZE_IMPORTS actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Of the two actions the server returned, only the requested kind
    // survives — so the filtering happens on the client side.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
8504
#[gpui::test]
async fn test_code_actions_without_requested_kinds_do_not_send_only_filter(
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that when the caller requests code actions without a kind
    // filter, the outgoing LSP request leaves `context.only` unset, and
    // server-provided actions of any kind come back unfiltered.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The fake server advertises support for two specific code-action kinds
    // via CodeActionOptions (rather than the simple boolean capability).
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        code_action_kinds: Some(vec![
                            CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
                            "source.doc".into(),
                        ]),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the server; park the executor so it finishes
    // initializing before the handler is installed.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The handler asserts the absence of `context.only` on the wire, then
    // returns an action whose kind is outside the server's advertised list.
    let mut request_handled = fake_server.set_request_handler::<
        lsp::request::CodeActionRequest,
        _,
        _,
    >(move |params, _| async move {
        assert_eq!(
            params.context.only, None,
            "Code action requests without explicit kind filters should not send `context.only`"
        );
        Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
            lsp::CodeAction {
                title: "Add test".to_string(),
                kind: Some("source.addTest".into()),
                ..lsp::CodeAction::default()
            },
        )]))
    });

    // Request code actions for the whole buffer with no kind filter (None).
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // The unfiltered request passes the server's action through untouched.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some("source.addTest".into())
    );
}
8590
8591#[gpui::test]
8592async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
8593 init_test(cx);
8594
8595 let fs = FakeFs::new(cx.executor());
8596 fs.insert_tree(
8597 path!("/dir"),
8598 json!({
8599 "a.tsx": "a",
8600 }),
8601 )
8602 .await;
8603
8604 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8605
8606 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8607 language_registry.add(tsx_lang());
8608 let language_server_names = [
8609 "TypeScriptServer",
8610 "TailwindServer",
8611 "ESLintServer",
8612 "NoActionsCapabilitiesServer",
8613 ];
8614
8615 let mut language_server_rxs = [
8616 language_registry.register_fake_lsp(
8617 "tsx",
8618 FakeLspAdapter {
8619 name: language_server_names[0],
8620 capabilities: lsp::ServerCapabilities {
8621 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8622 ..lsp::ServerCapabilities::default()
8623 },
8624 ..FakeLspAdapter::default()
8625 },
8626 ),
8627 language_registry.register_fake_lsp(
8628 "tsx",
8629 FakeLspAdapter {
8630 name: language_server_names[1],
8631 capabilities: lsp::ServerCapabilities {
8632 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8633 ..lsp::ServerCapabilities::default()
8634 },
8635 ..FakeLspAdapter::default()
8636 },
8637 ),
8638 language_registry.register_fake_lsp(
8639 "tsx",
8640 FakeLspAdapter {
8641 name: language_server_names[2],
8642 capabilities: lsp::ServerCapabilities {
8643 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8644 ..lsp::ServerCapabilities::default()
8645 },
8646 ..FakeLspAdapter::default()
8647 },
8648 ),
8649 language_registry.register_fake_lsp(
8650 "tsx",
8651 FakeLspAdapter {
8652 name: language_server_names[3],
8653 capabilities: lsp::ServerCapabilities {
8654 code_action_provider: None,
8655 ..lsp::ServerCapabilities::default()
8656 },
8657 ..FakeLspAdapter::default()
8658 },
8659 ),
8660 ];
8661
8662 let (buffer, _handle) = project
8663 .update(cx, |p, cx| {
8664 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
8665 })
8666 .await
8667 .unwrap();
8668 cx.executor().run_until_parked();
8669
8670 let mut servers_with_actions_requests = HashMap::default();
8671 for i in 0..language_server_names.len() {
8672 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
8673 panic!(
8674 "Failed to get language server #{i} with name {}",
8675 &language_server_names[i]
8676 )
8677 });
8678 let new_server_name = new_server.server.name();
8679
8680 assert!(
8681 !servers_with_actions_requests.contains_key(&new_server_name),
8682 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
8683 );
8684 match new_server_name.0.as_ref() {
8685 "TailwindServer" | "TypeScriptServer" => {
8686 servers_with_actions_requests.insert(
8687 new_server_name.clone(),
8688 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8689 move |_, _| {
8690 let name = new_server_name.clone();
8691 async move {
8692 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8693 lsp::CodeAction {
8694 title: format!("{name} code action"),
8695 ..lsp::CodeAction::default()
8696 },
8697 )]))
8698 }
8699 },
8700 ),
8701 );
8702 }
8703 "ESLintServer" => {
8704 servers_with_actions_requests.insert(
8705 new_server_name,
8706 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8707 |_, _| async move { Ok(None) },
8708 ),
8709 );
8710 }
8711 "NoActionsCapabilitiesServer" => {
8712 let _never_handled = new_server
8713 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
8714 panic!(
8715 "Should not call for code actions server with no corresponding capabilities"
8716 )
8717 });
8718 }
8719 unexpected => panic!("Unexpected server name: {unexpected}"),
8720 }
8721 }
8722
8723 let code_actions_task = project.update(cx, |project, cx| {
8724 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8725 });
8726
8727 // cx.run_until_parked();
8728 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
8729 |mut code_actions_request| async move {
8730 code_actions_request
8731 .next()
8732 .await
8733 .expect("All code actions requests should have been triggered")
8734 },
8735 ))
8736 .await;
8737 assert_eq!(
8738 vec!["TailwindServer code action", "TypeScriptServer code action"],
8739 code_actions_task
8740 .await
8741 .unwrap()
8742 .unwrap()
8743 .into_iter()
8744 .map(|code_action| code_action.lsp_action.title().to_owned())
8745 .sorted()
8746 .collect::<Vec<_>>(),
8747 "Should receive code actions responses from all related servers with hover capabilities"
8748 );
8749}
8750
8751#[gpui::test]
8752async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
8753 init_test(cx);
8754
8755 let fs = FakeFs::new(cx.executor());
8756 fs.insert_tree(
8757 "/dir",
8758 json!({
8759 "a.rs": "let a = 1;",
8760 "b.rs": "let b = 2;",
8761 "c.rs": "let c = 2;",
8762 }),
8763 )
8764 .await;
8765
8766 let project = Project::test(
8767 fs,
8768 [
8769 "/dir/a.rs".as_ref(),
8770 "/dir/b.rs".as_ref(),
8771 "/dir/c.rs".as_ref(),
8772 ],
8773 cx,
8774 )
8775 .await;
8776
8777 // check the initial state and get the worktrees
8778 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
8779 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8780 assert_eq!(worktrees.len(), 3);
8781
8782 let worktree_a = worktrees[0].read(cx);
8783 let worktree_b = worktrees[1].read(cx);
8784 let worktree_c = worktrees[2].read(cx);
8785
8786 // check they start in the right order
8787 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
8788 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
8789 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
8790
8791 (
8792 worktrees[0].clone(),
8793 worktrees[1].clone(),
8794 worktrees[2].clone(),
8795 )
8796 });
8797
8798 // move first worktree to after the second
8799 // [a, b, c] -> [b, a, c]
8800 project
8801 .update(cx, |project, cx| {
8802 let first = worktree_a.read(cx);
8803 let second = worktree_b.read(cx);
8804 project.move_worktree(first.id(), second.id(), cx)
8805 })
8806 .expect("moving first after second");
8807
8808 // check the state after moving
8809 project.update(cx, |project, cx| {
8810 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8811 assert_eq!(worktrees.len(), 3);
8812
8813 let first = worktrees[0].read(cx);
8814 let second = worktrees[1].read(cx);
8815 let third = worktrees[2].read(cx);
8816
8817 // check they are now in the right order
8818 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8819 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
8820 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8821 });
8822
8823 // move the second worktree to before the first
8824 // [b, a, c] -> [a, b, c]
8825 project
8826 .update(cx, |project, cx| {
8827 let second = worktree_a.read(cx);
8828 let first = worktree_b.read(cx);
8829 project.move_worktree(first.id(), second.id(), cx)
8830 })
8831 .expect("moving second before first");
8832
8833 // check the state after moving
8834 project.update(cx, |project, cx| {
8835 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8836 assert_eq!(worktrees.len(), 3);
8837
8838 let first = worktrees[0].read(cx);
8839 let second = worktrees[1].read(cx);
8840 let third = worktrees[2].read(cx);
8841
8842 // check they are now in the right order
8843 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8844 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8845 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8846 });
8847
8848 // move the second worktree to after the third
8849 // [a, b, c] -> [a, c, b]
8850 project
8851 .update(cx, |project, cx| {
8852 let second = worktree_b.read(cx);
8853 let third = worktree_c.read(cx);
8854 project.move_worktree(second.id(), third.id(), cx)
8855 })
8856 .expect("moving second after third");
8857
8858 // check the state after moving
8859 project.update(cx, |project, cx| {
8860 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8861 assert_eq!(worktrees.len(), 3);
8862
8863 let first = worktrees[0].read(cx);
8864 let second = worktrees[1].read(cx);
8865 let third = worktrees[2].read(cx);
8866
8867 // check they are now in the right order
8868 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8869 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8870 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
8871 });
8872
8873 // move the third worktree to before the second
8874 // [a, c, b] -> [a, b, c]
8875 project
8876 .update(cx, |project, cx| {
8877 let third = worktree_c.read(cx);
8878 let second = worktree_b.read(cx);
8879 project.move_worktree(third.id(), second.id(), cx)
8880 })
8881 .expect("moving third before second");
8882
8883 // check the state after moving
8884 project.update(cx, |project, cx| {
8885 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8886 assert_eq!(worktrees.len(), 3);
8887
8888 let first = worktrees[0].read(cx);
8889 let second = worktrees[1].read(cx);
8890 let third = worktrees[2].read(cx);
8891
8892 // check they are now in the right order
8893 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8894 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8895 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8896 });
8897
8898 // move the first worktree to after the third
8899 // [a, b, c] -> [b, c, a]
8900 project
8901 .update(cx, |project, cx| {
8902 let first = worktree_a.read(cx);
8903 let third = worktree_c.read(cx);
8904 project.move_worktree(first.id(), third.id(), cx)
8905 })
8906 .expect("moving first after third");
8907
8908 // check the state after moving
8909 project.update(cx, |project, cx| {
8910 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8911 assert_eq!(worktrees.len(), 3);
8912
8913 let first = worktrees[0].read(cx);
8914 let second = worktrees[1].read(cx);
8915 let third = worktrees[2].read(cx);
8916
8917 // check they are now in the right order
8918 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8919 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8920 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
8921 });
8922
8923 // move the third worktree to before the first
8924 // [b, c, a] -> [a, b, c]
8925 project
8926 .update(cx, |project, cx| {
8927 let third = worktree_a.read(cx);
8928 let first = worktree_b.read(cx);
8929 project.move_worktree(third.id(), first.id(), cx)
8930 })
8931 .expect("moving third before first");
8932
8933 // check the state after moving
8934 project.update(cx, |project, cx| {
8935 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8936 assert_eq!(worktrees.len(), 3);
8937
8938 let first = worktrees[0].read(cx);
8939 let second = worktrees[1].read(cx);
8940 let third = worktrees[2].read(cx);
8941
8942 // check they are now in the right order
8943 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8944 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8945 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8946 });
8947}
8948
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an unstaged diff compares the buffer against the git
    // index contents, and that the diff is recomputed when the index changes.
    init_test(cx);

    // Index (staged) contents: no comment line, "hello world".
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy contents: one added comment line, "goodbye world".
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // Seed the fake repository's index with the staged contents.
    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the diff computation settle, then expect one added hunk (the
    // comment line) and one modified hunk (hello -> goodbye). Hunk ranges
    // are buffer row ranges.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Re-stage contents that already include the comment but drop the
    // println line entirely.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    // After the index update settles, the diff base has changed: only the
    // println line remains unstaged, reported as a single added hunk.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
9043
// Exercises the uncommitted diff (HEAD vs. working copy) for a buffer:
// the initial hunks and their secondary (index) statuses, recomputation
// after HEAD is reset underneath the open buffer, the hunk shown for a file
// deleted from the working tree, and the secondary status flipping once
// that deletion is staged.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: HEAD, the index, and the working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index but not on disk, so it will
    // show up as a deletion below.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (the HEAD version) picks up the buffer's language.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment line differs from the index too, so it still has a
    // secondary (unstaged) hunk; the println change is already in the index,
    // so it has none.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a deletion; the index still contains the file,
    // so the hunk has a secondary (unstaged) counterpart.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file by writing an index that no longer
    // contains `deletion.rs`.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
9235
// End-to-end coverage of staging hunks via `stage_or_unstage_hunks`:
// optimistic "pending" secondary statuses, the events the diff emits
// (HunksStagedOrUnstaged followed by DiffChanged), rollback when the index
// write fails, and two staging operations issued back to back.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index start out identical, so every hunk below begins
    // unstaged (`HasSecondaryHunk`).
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Capture the diff's event stream so the emitted events can be asserted on.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged
    // (`SecondaryHunkRemovalPending`) before the index write lands.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
9585
9586#[gpui::test(iterations = 10)]
9587async fn test_uncommitted_diff_opened_before_unstaged_diff(cx: &mut gpui::TestAppContext) {
9588 use DiffHunkSecondaryStatus::*;
9589 init_test(cx);
9590
9591 let committed_contents = "one\ntwo\nthree\n";
9592 let file_contents = "one\nTWO\nthree\n";
9593
9594 let fs = FakeFs::new(cx.background_executor.clone());
9595 fs.insert_tree(
9596 "/dir",
9597 json!({
9598 ".git": {},
9599 "file.txt": file_contents,
9600 }),
9601 )
9602 .await;
9603 fs.set_head_and_index_for_repo(
9604 path!("/dir/.git").as_ref(),
9605 &[("file.txt", committed_contents.into())],
9606 );
9607
9608 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
9609 let buffer = project
9610 .update(cx, |project, cx| {
9611 project.open_local_buffer("/dir/file.txt", cx)
9612 })
9613 .await
9614 .unwrap();
9615
9616 let uncommitted_diff_task = project.update(cx, |project, cx| {
9617 project.open_uncommitted_diff(buffer.clone(), cx)
9618 });
9619 let unstaged_diff_task = project.update(cx, |project, cx| {
9620 project.open_unstaged_diff(buffer.clone(), cx)
9621 });
9622 let (uncommitted_diff, _unstaged_diff) =
9623 futures::future::join(uncommitted_diff_task, unstaged_diff_task).await;
9624 let uncommitted_diff = uncommitted_diff.unwrap();
9625 let _unstaged_diff = _unstaged_diff.unwrap();
9626
9627 cx.run_until_parked();
9628
9629 uncommitted_diff.read_with(cx, |diff, cx| {
9630 let snapshot = buffer.read(cx).snapshot();
9631 assert_hunks(
9632 diff.snapshot(cx).hunks_intersecting_range(
9633 Anchor::min_max_range_for_buffer(snapshot.remote_id()),
9634 &snapshot,
9635 ),
9636 &snapshot,
9637 &diff.base_text_string(cx).unwrap(),
9638 &[(
9639 1..2,
9640 "two\n",
9641 "TWO\n",
9642 DiffHunkStatus::modified(HasSecondaryHunk),
9643 )],
9644 );
9645 });
9646}
9647
// Same staging flow as `test_staging_hunks`, but with filesystem events
// paused and flushed manually, so that index writes and the FS notifications
// for them interleave: staging a hunk before the event for the previous
// staging operation arrives must not lose the pending statuses.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index are identical, so all hunks start unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. It shows as pending because the FS event for the
    // index write has not been delivered yet.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
9841
// Randomized stress test: repeatedly stage or unstage random hunks with
// random executor yields in between, mirroring the expected secondary
// status alongside each operation, then check the diff's final statuses
// match the mirror once everything settles.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of stage/unstage operations; override via the `OPERATIONS` env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line of the buffer is modified relative to HEAD/index,
    // producing a predictable set of hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the mirror of expected statuses below.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk and record the pending status we expect it to show.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending status resolves to its final form.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
9961
9962#[gpui::test]
9963async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
9964 init_test(cx);
9965
9966 let committed_contents = r#"
9967 fn main() {
9968 println!("hello from HEAD");
9969 }
9970 "#
9971 .unindent();
9972 let file_contents = r#"
9973 fn main() {
9974 println!("hello from the working copy");
9975 }
9976 "#
9977 .unindent();
9978
9979 let fs = FakeFs::new(cx.background_executor.clone());
9980 fs.insert_tree(
9981 "/dir",
9982 json!({
9983 ".git": {},
9984 "src": {
9985 "main.rs": file_contents,
9986 }
9987 }),
9988 )
9989 .await;
9990
9991 fs.set_head_for_repo(
9992 Path::new("/dir/.git"),
9993 &[("src/main.rs", committed_contents.clone())],
9994 "deadbeef",
9995 );
9996 fs.set_index_for_repo(
9997 Path::new("/dir/.git"),
9998 &[("src/main.rs", committed_contents.clone())],
9999 );
10000
10001 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
10002
10003 let buffer = project
10004 .update(cx, |project, cx| {
10005 project.open_local_buffer("/dir/src/main.rs", cx)
10006 })
10007 .await
10008 .unwrap();
10009 let uncommitted_diff = project
10010 .update(cx, |project, cx| {
10011 project.open_uncommitted_diff(buffer.clone(), cx)
10012 })
10013 .await
10014 .unwrap();
10015
10016 cx.run_until_parked();
10017 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
10018 let snapshot = buffer.read(cx).snapshot();
10019 assert_hunks(
10020 uncommitted_diff.snapshot(cx).hunks(&snapshot),
10021 &snapshot,
10022 &uncommitted_diff.base_text_string(cx).unwrap(),
10023 &[(
10024 1..2,
10025 " println!(\"hello from HEAD\");\n",
10026 " println!(\"hello from the working copy\");\n",
10027 DiffHunkStatus {
10028 kind: DiffHunkStatusKind::Modified,
10029 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
10030 },
10031 )],
10032 );
10033 });
10034}
10035
// TODO: Should we test this on Windows also?
// Regression test: staging a hunk must not rewrite the file's index entry
// with mode 100644 when the file was committed as executable (100755).
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // This test uses a real git repository and the real filesystem, so
    // blocking (parking) is allowed.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` with the executable bit set (0o755), then modify it on
    // disk so there is a hunk to stage.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk, which writes a new entry for `foo` into the index.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // `git diff --staged` would report "new mode 100644" if staging had
    // dropped the executable bit.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check the raw index entry mode via `git ls-files -s`.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
10120
// Checks `repository_and_path_for_project_path` with nested repositories:
// a path outside any repo maps to `None`, a path in the outer repo maps to
// it, and a path under `deps/dep1` maps to the inner repo. Also verifies
// that deleting a `.git` directory drops the association.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    // Wait for the initial repository scan so lookups below are deterministic.
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (worktree-relative path, expected (repo work dir, repo-relative path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer `.git` directory removes that repository, so the
    // lookup for a file inside it now returns `None`.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
10210
10211#[gpui::test]
10212async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
10213 init_test(cx);
10214 let fs = FakeFs::new(cx.background_executor.clone());
10215 let home = paths::home_dir();
10216 fs.insert_tree(
10217 home,
10218 json!({
10219 ".git": {},
10220 "project": {
10221 "a.txt": "A"
10222 },
10223 }),
10224 )
10225 .await;
10226
10227 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
10228 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10229 let tree_id = tree.read_with(cx, |tree, _| tree.id());
10230
10231 project
10232 .update(cx, |project, cx| project.git_scans_complete(cx))
10233 .await;
10234 tree.flush_fs_events(cx).await;
10235
10236 project.read_with(cx, |project, cx| {
10237 let containing = project
10238 .git_store()
10239 .read(cx)
10240 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
10241 assert!(containing.is_none());
10242 });
10243
10244 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
10245 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10246 let tree_id = tree.read_with(cx, |tree, _| tree.id());
10247 project
10248 .update(cx, |project, cx| project.git_scans_complete(cx))
10249 .await;
10250 tree.flush_fs_events(cx).await;
10251
10252 project.read_with(cx, |project, cx| {
10253 let containing = project
10254 .git_store()
10255 .read(cx)
10256 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
10257 assert_eq!(
10258 containing
10259 .unwrap()
10260 .0
10261 .read(cx)
10262 .work_directory_abs_path
10263 .as_ref(),
10264 home,
10265 );
10266 });
10267}
10268
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    // End-to-end status tracking against a real on-disk git repository:
    // verifies that modified/untracked/deleted entries (and their diff stats)
    // are observed on startup and kept up to date across edits, commits, and
    // file deletions.
    init_test(cx);
    // Real filesystem and real git operations below, so blocking is expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Modify the previously-unchanged file; it should now appear as modified.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Commit the outstanding modifications and drop d.txt from the index.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked (a.txt) and one untracked (b.txt) file.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
                diff_stat: Some(DiffStat {
                    added: 0,
                    deleted: 1,
                }),
            }]
        );
    });
}
10424
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    // Verifies two pieces of status postprocessing:
    // 1. entries belonging to a nested git repository are excluded from the
    //    outer repository's statuses, and
    // 2. a file that exists in HEAD and the working copy but is deleted in
    //    the index is reported with a combined `DA` status
    //    (index-deleted + worktree-added).
    init_test(cx);
    // Real filesystem and real git operations below.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Both `project` and `project/sub` are repositories; pick the outer one.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
                diff_stat: None,
            }]
        )
    });
}
10489
10490#[track_caller]
10491/// We merge lhs into rhs.
10492fn merge_pending_ops_snapshots(
10493 source: Vec<pending_op::PendingOps>,
10494 mut target: Vec<pending_op::PendingOps>,
10495) -> Vec<pending_op::PendingOps> {
10496 for s_ops in source {
10497 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
10498 if ops.repo_path == s_ops.repo_path {
10499 Some(idx)
10500 } else {
10501 None
10502 }
10503 }) {
10504 let t_ops = &mut target[idx];
10505 for s_op in s_ops.ops {
10506 if let Some(op_idx) = t_ops
10507 .ops
10508 .iter()
10509 .zip(0..)
10510 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
10511 {
10512 let t_op = &mut t_ops.ops[op_idx];
10513 match (s_op.job_status, t_op.job_status) {
10514 (pending_op::JobStatus::Running, _) => {}
10515 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
10516 (s_st, t_st) if s_st == t_st => {}
10517 _ => unreachable!(),
10518 }
10519 } else {
10520 t_ops.ops.push(s_op);
10521 }
10522 }
10523 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
10524 } else {
10525 target.push(s_ops);
10526 }
10527 }
10528 target
10529}
10530
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Exercises the pending-ops bookkeeping for repeated stage/unstage of a
    // single untracked file: each request should surface as a `Running` op
    // while in flight and settle to `Finished`, and the full op history
    // should be observable through `PendingOpsChanged` events.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every `PendingOpsChanged` event into one merged snapshot so
    // the complete op history can be asserted on at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Op ids appear to be allocated sequentially per request, starting at 1.
    let mut id = 1u16;

    // Issues a stage or unstage request, asserting the op is `Running` while
    // the task is in flight and `Finished` once it resolves.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate staging and unstaging, ending staged.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The accumulated event history records all five ops, in order, finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The final cached status reflects the last operation: staged (added).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10695
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies coalescing of overlapping staging requests: when a second
    // stage request for the same path is issued while an earlier one is
    // still outstanding, the earlier op ends up `Skipped` and only the later
    // one `Finished`.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every `PendingOpsChanged` event into one merged snapshot so
    // the complete op history can be asserted on at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage request: detached, left outstanding.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second stage request for the same path: awaited (bounded by a timeout
    // so a hang fails the test rather than stalling it).
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // The first op was superseded (Skipped); the second completed (Finished).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file ends up staged (added to the index) exactly once.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10805
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Exercises pending-ops bookkeeping for whole-repository operations:
    // stage one file individually, then `stage_all`, then `unstage_all`,
    // and check the per-path op histories and the resulting statuses.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every `PendingOpsChanged` event into one merged snapshot so
    // the complete op history can be asserted on at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt individually, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // Each path ends with exactly two ops — one Staged, one Unstaged — both
    // finished. (Ids appear to be allocated per path: a.txt's stage_all pass
    // produced no additional op since it was already staged.)
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
            ]
        );
    });
}
10936
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // When the worktree is rooted deep inside a repository, the repository
    // root above it should still be discovered, and statuses should be
    // reported and updated for paths the worktree can see.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the two files inside the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open only the innermost subfolder as the worktree root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The repository root is the directory containing `.git`, well above
        // the worktree root.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clear all statuses; the change should propagate to the repository.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
11016
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// `#[cfg(any())]` is an always-false cfg, so this test is currently compiled out entirely.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    // Drives a real cherry-pick into a conflict and verifies that the
    // repository's `merge_conflicts` set tracks the conflicted path, then
    // clears once the cherry-pick is resolved and committed.
    init_test(cx);
    // Real filesystem and real git operations below.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create divergent edits to a.txt on two branches, then cherry-pick one
    // onto the other to force a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is mid-cherry-pick with a conflict.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The conflicted path should now be tracked by the repository.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // With the cherry-pick resolved, the conflict set should be empty again.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
11099
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies that editing `.gitignore` updates per-entry ignored state and
    // git status: a file that stops being ignored can be staged and show as
    // Added, while a newly-ignored file loses its status.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // Start with a.xml committed and unmodified; b.txt is ignored by "*.txt".
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Ignored-ness has flipped: a.xml is now ignored; b.txt shows as Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
11167
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    // Renaming a repository's work directory on disk should update the
    // repository's `work_directory_abs_path` while preserving the statuses
    // of the files inside it.
    init_test(cx);
    // Real filesystem and real git operations below.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit "a", then modify it; "b" stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: work dir at project1, "a" modified, "b" untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename and statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
11249
11250// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
11251// you can't rename a directory which some program has already open. This is a
11252// limitation of the Windows. See:
11253// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
11254// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
11255#[gpui::test]
11256#[cfg_attr(target_os = "windows", ignore)]
11257async fn test_file_status(cx: &mut gpui::TestAppContext) {
11258 init_test(cx);
11259 cx.executor().allow_parking();
11260 const IGNORE_RULE: &str = "**/target";
11261
11262 let root = TempTree::new(json!({
11263 "project": {
11264 "a.txt": "a",
11265 "b.txt": "bb",
11266 "c": {
11267 "d": {
11268 "e.txt": "eee"
11269 }
11270 },
11271 "f.txt": "ffff",
11272 "target": {
11273 "build_file": "???"
11274 },
11275 ".gitignore": IGNORE_RULE
11276 },
11277
11278 }));
11279 let root_path = root.path();
11280
11281 const A_TXT: &str = "a.txt";
11282 const B_TXT: &str = "b.txt";
11283 const E_TXT: &str = "c/d/e.txt";
11284 const F_TXT: &str = "f.txt";
11285 const DOTGITIGNORE: &str = ".gitignore";
11286 const BUILD_FILE: &str = "target/build_file";
11287
11288 // Set up git repository before creating the worktree.
11289 let work_dir = root.path().join("project");
11290 let mut repo = git_init(work_dir.as_path());
11291 repo.add_ignore_rule(IGNORE_RULE).unwrap();
11292 git_add(A_TXT, &repo);
11293 git_add(E_TXT, &repo);
11294 git_add(DOTGITIGNORE, &repo);
11295 git_commit("Initial commit", &repo);
11296
11297 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
11298
11299 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11300 tree.flush_fs_events(cx).await;
11301 project
11302 .update(cx, |project, cx| project.git_scans_complete(cx))
11303 .await;
11304 cx.executor().run_until_parked();
11305
11306 let repository = project.read_with(cx, |project, cx| {
11307 project.repositories(cx).values().next().unwrap().clone()
11308 });
11309
11310 // Check that the right git state is observed on startup
11311 repository.read_with(cx, |repository, _cx| {
11312 assert_eq!(
11313 repository.work_directory_abs_path.as_ref(),
11314 root_path.join("project").as_path()
11315 );
11316
11317 assert_eq!(
11318 repository
11319 .status_for_path(&repo_path(B_TXT))
11320 .unwrap()
11321 .status,
11322 FileStatus::Untracked,
11323 );
11324 assert_eq!(
11325 repository
11326 .status_for_path(&repo_path(F_TXT))
11327 .unwrap()
11328 .status,
11329 FileStatus::Untracked,
11330 );
11331 });
11332
11333 // Modify a file in the working copy.
11334 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
11335 tree.flush_fs_events(cx).await;
11336 project
11337 .update(cx, |project, cx| project.git_scans_complete(cx))
11338 .await;
11339 cx.executor().run_until_parked();
11340
11341 // The worktree detects that the file's git status has changed.
11342 repository.read_with(cx, |repository, _| {
11343 assert_eq!(
11344 repository
11345 .status_for_path(&repo_path(A_TXT))
11346 .unwrap()
11347 .status,
11348 StatusCode::Modified.worktree(),
11349 );
11350 });
11351
11352 // Create a commit in the git repository.
11353 git_add(A_TXT, &repo);
11354 git_add(B_TXT, &repo);
11355 git_commit("Committing modified and added", &repo);
11356 tree.flush_fs_events(cx).await;
11357 project
11358 .update(cx, |project, cx| project.git_scans_complete(cx))
11359 .await;
11360 cx.executor().run_until_parked();
11361
11362 // The worktree detects that the files' git status have changed.
11363 repository.read_with(cx, |repository, _cx| {
11364 assert_eq!(
11365 repository
11366 .status_for_path(&repo_path(F_TXT))
11367 .unwrap()
11368 .status,
11369 FileStatus::Untracked,
11370 );
11371 assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
11372 assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
11373 });
11374
11375 // Modify files in the working copy and perform git operations on other files.
11376 git_reset(0, &repo);
11377 git_remove_index(Path::new(B_TXT), &repo);
11378 git_stash(&mut repo);
11379 std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
11380 std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
11381 tree.flush_fs_events(cx).await;
11382 project
11383 .update(cx, |project, cx| project.git_scans_complete(cx))
11384 .await;
11385 cx.executor().run_until_parked();
11386
11387 // Check that more complex repo changes are tracked
11388 repository.read_with(cx, |repository, _cx| {
11389 assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
11390 assert_eq!(
11391 repository
11392 .status_for_path(&repo_path(B_TXT))
11393 .unwrap()
11394 .status,
11395 FileStatus::Untracked,
11396 );
11397 assert_eq!(
11398 repository
11399 .status_for_path(&repo_path(E_TXT))
11400 .unwrap()
11401 .status,
11402 StatusCode::Modified.worktree(),
11403 );
11404 });
11405
11406 std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
11407 std::fs::remove_dir_all(work_dir.join("c")).unwrap();
11408 std::fs::write(
11409 work_dir.join(DOTGITIGNORE),
11410 [IGNORE_RULE, "f.txt"].join("\n"),
11411 )
11412 .unwrap();
11413
11414 git_add(Path::new(DOTGITIGNORE), &repo);
11415 git_commit("Committing modified git ignore", &repo);
11416
11417 tree.flush_fs_events(cx).await;
11418 cx.executor().run_until_parked();
11419
11420 let mut renamed_dir_name = "first_directory/second_directory";
11421 const RENAMED_FILE: &str = "rf.txt";
11422
11423 std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
11424 std::fs::write(
11425 work_dir.join(renamed_dir_name).join(RENAMED_FILE),
11426 "new-contents",
11427 )
11428 .unwrap();
11429
11430 tree.flush_fs_events(cx).await;
11431 project
11432 .update(cx, |project, cx| project.git_scans_complete(cx))
11433 .await;
11434 cx.executor().run_until_parked();
11435
11436 repository.read_with(cx, |repository, _cx| {
11437 assert_eq!(
11438 repository
11439 .status_for_path(&RepoPath::from_rel_path(
11440 &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
11441 ))
11442 .unwrap()
11443 .status,
11444 FileStatus::Untracked,
11445 );
11446 });
11447
11448 renamed_dir_name = "new_first_directory/second_directory";
11449
11450 std::fs::rename(
11451 work_dir.join("first_directory"),
11452 work_dir.join("new_first_directory"),
11453 )
11454 .unwrap();
11455
11456 tree.flush_fs_events(cx).await;
11457 project
11458 .update(cx, |project, cx| project.git_scans_complete(cx))
11459 .await;
11460 cx.executor().run_until_parked();
11461
11462 repository.read_with(cx, |repository, _cx| {
11463 assert_eq!(
11464 repository
11465 .status_for_path(&RepoPath::from_rel_path(
11466 &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
11467 ))
11468 .unwrap()
11469 .status,
11470 FileStatus::Untracked,
11471 );
11472 });
11473}
11474
// Verifies that filesystem churn inside a git-ignored directory (`target/`)
// produces neither repository status updates nor worktree entry updates,
// except for entries directly inside an already-loaded ignored directory.
// NOTE(review): runs against the real filesystem (`TempTree` + `RealFs`),
// hence `allow_parking`; marked #[ignore], presumably due to event-timing
// flakiness — confirm before re-enabling.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Capture every repository update and every worktree entry update so we
    // can assert exactly which events the scenario produces. The
    // "fs-event-sentinel" path is test plumbing and is filtered out.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    // Loading a file under `target/` forces the ignored directory's entries
    // to be scanned and marked `is_ignored`.
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![RepositoryEvent::StatusesChanged,],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Emulate flycheck-like churn inside the ignored directory: create a
    // nested dir, drop a temp file into it, then remove the whole dir.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
    No updates for more nested directories should happen as those are ignored",
    );
}
11633
11634// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
11635// to different timings/ordering of events.
11636#[ignore]
11637#[gpui::test]
11638async fn test_odd_events_for_ignored_dirs(
11639 executor: BackgroundExecutor,
11640 cx: &mut gpui::TestAppContext,
11641) {
11642 init_test(cx);
11643 let fs = FakeFs::new(executor);
11644 fs.insert_tree(
11645 path!("/root"),
11646 json!({
11647 ".git": {},
11648 ".gitignore": "**/target/",
11649 "src": {
11650 "main.rs": "fn main() {}",
11651 },
11652 "target": {
11653 "debug": {
11654 "foo.txt": "foo",
11655 "deps": {}
11656 }
11657 }
11658 }),
11659 )
11660 .await;
11661 fs.set_head_and_index_for_repo(
11662 path!("/root/.git").as_ref(),
11663 &[
11664 (".gitignore", "**/target/".into()),
11665 ("src/main.rs", "fn main() {}".into()),
11666 ],
11667 );
11668
11669 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11670 let repository_updates = Arc::new(Mutex::new(Vec::new()));
11671 let project_events = Arc::new(Mutex::new(Vec::new()));
11672 project.update(cx, |project, cx| {
11673 let repository_updates = repository_updates.clone();
11674 cx.subscribe(project.git_store(), move |_, _, e, _| {
11675 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
11676 repository_updates.lock().push(e.clone());
11677 }
11678 })
11679 .detach();
11680 let project_events = project_events.clone();
11681 cx.subscribe_self(move |_, e, _| {
11682 if let Event::WorktreeUpdatedEntries(_, updates) = e {
11683 project_events.lock().extend(
11684 updates
11685 .iter()
11686 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
11687 .filter(|(path, _)| path != "fs-event-sentinel"),
11688 );
11689 }
11690 })
11691 .detach();
11692 });
11693
11694 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11695 tree.update(cx, |tree, cx| {
11696 tree.load_file(rel_path("target/debug/foo.txt"), cx)
11697 })
11698 .await
11699 .unwrap();
11700 tree.flush_fs_events(cx).await;
11701 project
11702 .update(cx, |project, cx| project.git_scans_complete(cx))
11703 .await;
11704 cx.run_until_parked();
11705 tree.update(cx, |tree, _| {
11706 assert_eq!(
11707 tree.entries(true, 0)
11708 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
11709 .collect::<Vec<_>>(),
11710 vec![
11711 (rel_path(""), false),
11712 (rel_path(".gitignore"), false),
11713 (rel_path("src"), false),
11714 (rel_path("src/main.rs"), false),
11715 (rel_path("target"), true),
11716 (rel_path("target/debug"), true),
11717 (rel_path("target/debug/deps"), true),
11718 (rel_path("target/debug/foo.txt"), true),
11719 ]
11720 );
11721 });
11722
11723 assert_eq!(
11724 repository_updates.lock().drain(..).collect::<Vec<_>>(),
11725 vec![
11726 RepositoryEvent::HeadChanged,
11727 RepositoryEvent::StatusesChanged,
11728 RepositoryEvent::StatusesChanged,
11729 ],
11730 "Initial worktree scan should produce a repo update event"
11731 );
11732 assert_eq!(
11733 project_events.lock().drain(..).collect::<Vec<_>>(),
11734 vec![
11735 ("target".to_string(), PathChange::Loaded),
11736 ("target/debug".to_string(), PathChange::Loaded),
11737 ("target/debug/deps".to_string(), PathChange::Loaded),
11738 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
11739 ],
11740 "All non-ignored entries and all opened firs should be getting a project event",
11741 );
11742
11743 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
11744 // This may happen multiple times during a single flycheck, but once is enough for testing.
11745 fs.emit_fs_event("/root/target/debug/deps", None);
11746 tree.flush_fs_events(cx).await;
11747 project
11748 .update(cx, |project, cx| project.git_scans_complete(cx))
11749 .await;
11750 cx.executor().run_until_parked();
11751
11752 assert_eq!(
11753 repository_updates
11754 .lock()
11755 .iter()
11756 .cloned()
11757 .collect::<Vec<_>>(),
11758 Vec::new(),
11759 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
11760 );
11761 assert_eq!(
11762 project_events.lock().as_slice(),
11763 Vec::new(),
11764 "No further project events should happen, as only ignored dirs received FS events",
11765 );
11766}
11767
11768#[gpui::test]
11769async fn test_repos_in_invisible_worktrees(
11770 executor: BackgroundExecutor,
11771 cx: &mut gpui::TestAppContext,
11772) {
11773 init_test(cx);
11774 let fs = FakeFs::new(executor);
11775 fs.insert_tree(
11776 path!("/root"),
11777 json!({
11778 "dir1": {
11779 ".git": {},
11780 "dep1": {
11781 ".git": {},
11782 "src": {
11783 "a.txt": "",
11784 },
11785 },
11786 "b.txt": "",
11787 },
11788 }),
11789 )
11790 .await;
11791
11792 let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
11793 let _visible_worktree =
11794 project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11795 project
11796 .update(cx, |project, cx| project.git_scans_complete(cx))
11797 .await;
11798
11799 let repos = project.read_with(cx, |project, cx| {
11800 project
11801 .repositories(cx)
11802 .values()
11803 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
11804 .collect::<Vec<_>>()
11805 });
11806 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
11807
11808 let (_invisible_worktree, _) = project
11809 .update(cx, |project, cx| {
11810 project.worktree_store().update(cx, |worktree_store, cx| {
11811 worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
11812 })
11813 })
11814 .await
11815 .expect("failed to create worktree");
11816 project
11817 .update(cx, |project, cx| project.git_scans_complete(cx))
11818 .await;
11819
11820 let repos = project.read_with(cx, |project, cx| {
11821 project
11822 .repositories(cx)
11823 .values()
11824 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
11825 .collect::<Vec<_>>()
11826 });
11827 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
11828}
11829
// Verifies that per-entry git state (status + ignored flag) stays correct
// across rescans: tracked files, files ignored by an *ancestor* .gitignore,
// and files inside an ignored directory, both on initial scan and after new
// files are created and the index is updated.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file_scan_exclusions so the `.git` directory itself is scanned
    // and can be asserted on at the end of the test.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // The ancestor `.gitignore` lives *above* the repository root, so its
    // rules apply to the worktree without being part of the repo.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded so their git state
    // can be inspected.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: clean tracked file, ancestor-ignored file, and a file
    // inside the repo-ignored directory.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new tracked file (and stage it via the index), plus new
    // ancestor-ignored and repo-ignored files.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    // After the rescan: the staged file shows as Added, ignored files show
    // no status, and `.git` itself is treated as ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
11970
// Verifies that linked git worktrees (`.git` file pointing into
// `.git/worktrees/...`) and submodules (`.git` file pointing into
// `.git/modules/...`) are each discovered as distinct repositories, that
// buffers map to the correct repository, and that git state changes in
// either are picked up.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories — main, linked worktree, submodule — should be
    // discovered.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, whose
    // original repo path is the main project and which reports itself as a
    // linked worktree. A barrier is used to wait for pending git work.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        pretty_assertions::assert_eq!(
            repo.read(cx).original_repo_abs_path,
            Path::new(path!("/project")).into(),
        );
        assert!(
            repo.read(cx).linked_worktree_path().is_some(),
            "linked worktree should be detected as a linked worktree"
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // HEAD/index now say "b" while the file on disk says "B", so the file
    // shows as modified in the worktree.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    // Unlike the linked worktree, the submodule's original repo path is its
    // own work directory and it is not a linked worktree.
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        pretty_assertions::assert_eq!(
            repo.read(cx).original_repo_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        assert!(
            repo.read(cx).linked_worktree_path().is_none(),
            "submodule should not be detected as a linked worktree"
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
12142
12143#[gpui::test]
12144async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
12145 init_test(cx);
12146 let fs = FakeFs::new(cx.background_executor.clone());
12147 fs.insert_tree(
12148 path!("/root"),
12149 json!({
12150 "project": {
12151 ".git": {},
12152 "child1": {
12153 "a.txt": "A",
12154 },
12155 "child2": {
12156 "b.txt": "B",
12157 }
12158 }
12159 }),
12160 )
12161 .await;
12162
12163 let project = Project::test(
12164 fs.clone(),
12165 [
12166 path!("/root/project/child1").as_ref(),
12167 path!("/root/project/child2").as_ref(),
12168 ],
12169 cx,
12170 )
12171 .await;
12172
12173 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
12174 tree.flush_fs_events(cx).await;
12175 project
12176 .update(cx, |project, cx| project.git_scans_complete(cx))
12177 .await;
12178 cx.executor().run_until_parked();
12179
12180 let repos = project.read_with(cx, |project, cx| {
12181 project
12182 .repositories(cx)
12183 .values()
12184 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
12185 .collect::<Vec<_>>()
12186 });
12187 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
12188}
12189
// Verifies that when a buffer is saved under a new path, its unstaged and
// uncommitted diffs switch their base texts to the *new* path's staged and
// committed contents (driven by the `BufferChangedFilePath` event).
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct committed/staged contents per file so we can tell exactly
    // which base text a diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Replace the buffer contents so there is something to diff.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // While the buffer is still file_1, its unstaged diff compares against
    // file_1's index contents.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // A diff opened *after* the rename should also use file_2's committed
    // contents as its base.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
12303
12304async fn search(
12305 project: &Entity<Project>,
12306 query: SearchQuery,
12307 cx: &mut gpui::TestAppContext,
12308) -> Result<HashMap<String, Vec<Range<usize>>>> {
12309 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
12310 let mut results = HashMap::default();
12311 while let Ok(search_result) = search_rx.rx.recv().await {
12312 match search_result {
12313 SearchResult::Buffer { buffer, ranges } => {
12314 results.entry(buffer).or_insert(ranges);
12315 }
12316 SearchResult::LimitReached | SearchResult::WaitingForScan => {}
12317 }
12318 }
12319 Ok(results
12320 .into_iter()
12321 .map(|(buffer, ranges)| {
12322 buffer.update(cx, |buffer, cx| {
12323 let path = buffer
12324 .file()
12325 .unwrap()
12326 .full_path(cx)
12327 .to_string_lossy()
12328 .to_string();
12329 let ranges = ranges
12330 .into_iter()
12331 .map(|range| range.to_offset(buffer))
12332 .collect::<Vec<_>>();
12333 (path, ranges)
12334 })
12335 })
12336 .collect())
12337}
12338
// Verifies that reloading a buffer with a different encoding is undoable:
// undo restores the original encoding and text, redo reapplies the new
// encoding, and the buffer never becomes dirty (contents always come from
// disk).
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    // Baseline: detected as UTF-8, decoded as "Hi", clean.
    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    // Reinterpret the same bytes on disk as UTF-16LE.
    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    // 0x48 0x69 little-endian is the single code unit U+6948.
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    // Undo restores both the encoding and the decoded text.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    // Redo reapplies the UTF-16LE interpretation.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
12402
#[gpui::test]
async fn test_initial_scan_complete(cx: &mut gpui::TestAppContext) {
    // Verifies that awaiting `Project::wait_for_initial_scan` is sufficient to
    // observe a completed worktree scan, and that the scan registers one git
    // repository per `.git` directory found under the project roots.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // Two project roots, each containing its own `.git` directory and a
    // `.zed/tasks.json` file.
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                ".zed": {
                    "tasks.json": r#"[{"label": "task-a", "command": "echo a"}]"#
                },
                "src": { "main.rs": "" }
            },
            "b": {
                ".git": {},
                ".zed": {
                    "tasks.json": r#"[{"label": "task-b", "command": "echo b"}]"#
                },
                "src": { "lib.rs": "" }
            },
        }),
    )
    .await;

    // Record every `Repository` entity created while the project scans, so we
    // can assert on how many were constructed, not just how many survive.
    let repos_created = Rc::new(RefCell::new(Vec::new()));
    let _observe = {
        let repos_created = repos_created.clone();
        cx.update(|cx| {
            cx.observe_new::<Repository>(move |repo, _, cx| {
                repos_created.borrow_mut().push(cx.entity().downgrade());
                let _ = repo;
            })
        })
    };

    let project = Project::test(
        fs.clone(),
        [path!("/root/a").as_ref(), path!("/root/b").as_ref()],
        cx,
    )
    .await;

    let scan_complete = project.read_with(cx, |project, cx| project.wait_for_initial_scan(cx));
    scan_complete.await;

    project.read_with(cx, |project, cx| {
        assert!(
            project.worktree_store().read(cx).initial_scan_completed(),
            "Expected initial scan to be completed after awaiting wait_for_initial_scan"
        );
    });

    // One repository per worktree root should have been observed.
    let created_repos_len = repos_created.borrow().len();
    assert_eq!(
        created_repos_len, 2,
        "Expected 2 repositories to be created during scan, got {}",
        created_repos_len
    );

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repositories().len(),
            2,
            "Expected 2 repositories in GitStore"
        );
    });
}
12473
12474pub fn init_test(cx: &mut gpui::TestAppContext) {
12475 zlog::init_test();
12476
12477 cx.update(|cx| {
12478 let settings_store = SettingsStore::test(cx);
12479 cx.set_global(settings_store);
12480 release_channel::init(semver::Version::new(0, 0, 0), cx);
12481 });
12482}
12483
12484fn json_lang() -> Arc<Language> {
12485 Arc::new(Language::new(
12486 LanguageConfig {
12487 name: "JSON".into(),
12488 matcher: LanguageMatcher {
12489 path_suffixes: vec!["json".to_string()],
12490 ..Default::default()
12491 },
12492 ..Default::default()
12493 },
12494 None,
12495 ))
12496}
12497
12498fn js_lang() -> Arc<Language> {
12499 Arc::new(Language::new(
12500 LanguageConfig {
12501 name: "JavaScript".into(),
12502 matcher: LanguageMatcher {
12503 path_suffixes: vec!["js".to_string()],
12504 ..Default::default()
12505 },
12506 ..Default::default()
12507 },
12508 None,
12509 ))
12510}
12511
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // A fake Python language for toolchain tests: it has no grammar, but
    // carries a manifest name ("pyproject.toml") and a toolchain lister that
    // discovers `.venv` directories on the fake filesystem.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
        ) -> ToolchainList {
            // Report one toolchain for every `.venv` directory that exists in
            // any ancestor of `subroot_relative_path` (including the subroot
            // itself), resolved relative to the worktree root.
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Toolchain resolution is intentionally unsupported in this fake.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // The fake toolchain needs no shell activation commands.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
12584
12585fn typescript_lang() -> Arc<Language> {
12586 Arc::new(Language::new(
12587 LanguageConfig {
12588 name: "TypeScript".into(),
12589 matcher: LanguageMatcher {
12590 path_suffixes: vec!["ts".to_string()],
12591 ..Default::default()
12592 },
12593 ..Default::default()
12594 },
12595 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
12596 ))
12597}
12598
12599fn tsx_lang() -> Arc<Language> {
12600 Arc::new(Language::new(
12601 LanguageConfig {
12602 name: "tsx".into(),
12603 matcher: LanguageMatcher {
12604 path_suffixes: vec!["tsx".to_string()],
12605 ..Default::default()
12606 },
12607 ..Default::default()
12608 },
12609 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
12610 ))
12611}
12612
12613fn get_all_tasks(
12614 project: &Entity<Project>,
12615 task_contexts: Arc<TaskContexts>,
12616 cx: &mut App,
12617) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
12618 let new_tasks = project.update(cx, |project, cx| {
12619 project.task_store().update(cx, |task_store, cx| {
12620 task_store.task_inventory().unwrap().update(cx, |this, cx| {
12621 this.used_and_current_resolved_tasks(task_contexts, cx)
12622 })
12623 })
12624 });
12625
12626 cx.background_spawn(async move {
12627 let (mut old, new) = new_tasks.await;
12628 old.extend(new);
12629 old
12630 })
12631}
12632
12633#[track_caller]
12634fn assert_entry_git_state(
12635 tree: &Worktree,
12636 repository: &Repository,
12637 path: &str,
12638 index_status: Option<StatusCode>,
12639 is_ignored: bool,
12640) {
12641 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
12642 let entry = tree
12643 .entry_for_path(&rel_path(path))
12644 .unwrap_or_else(|| panic!("entry {path} not found"));
12645 let status = repository
12646 .status_for_path(&repo_path(path))
12647 .map(|entry| entry.status);
12648 let expected = index_status.map(|index_status| {
12649 TrackedStatus {
12650 index_status,
12651 worktree_status: StatusCode::Unmodified,
12652 }
12653 .into()
12654 });
12655 assert_eq!(
12656 status, expected,
12657 "expected {path} to have git status: {expected:?}"
12658 );
12659 assert_eq!(
12660 entry.is_ignored, is_ignored,
12661 "expected {path} to have is_ignored: {is_ignored}"
12662 );
12663}
12664
12665#[track_caller]
12666fn git_init(path: &Path) -> git2::Repository {
12667 let mut init_opts = RepositoryInitOptions::new();
12668 init_opts.initial_head("main");
12669 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
12670}
12671
12672#[track_caller]
12673fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
12674 let path = path.as_ref();
12675 let mut index = repo.index().expect("Failed to get index");
12676 index.add_path(path).expect("Failed to add file");
12677 index.write().expect("Failed to write index");
12678}
12679
12680#[track_caller]
12681fn git_remove_index(path: &Path, repo: &git2::Repository) {
12682 let mut index = repo.index().expect("Failed to get index");
12683 index.remove_path(path).expect("Failed to add file");
12684 index.write().expect("Failed to write index");
12685}
12686
12687#[track_caller]
12688fn git_commit(msg: &'static str, repo: &git2::Repository) {
12689 use git2::Signature;
12690
12691 let signature = Signature::now("test", "test@zed.dev").unwrap();
12692 let oid = repo.index().unwrap().write_tree().unwrap();
12693 let tree = repo.find_tree(oid).unwrap();
12694 if let Ok(head) = repo.head() {
12695 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
12696
12697 let parent_commit = parent_obj.as_commit().unwrap();
12698
12699 repo.commit(
12700 Some("HEAD"),
12701 &signature,
12702 &signature,
12703 msg,
12704 &tree,
12705 &[parent_commit],
12706 )
12707 .expect("Failed to commit with parent");
12708 } else {
12709 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
12710 .expect("Failed to commit");
12711 }
12712}
12713
// Cherry-pick `commit` onto the current HEAD. Compiled out via
// `#[cfg(any())]`; kept for ad-hoc use when writing new git tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
12719
12720#[track_caller]
12721fn git_stash(repo: &mut git2::Repository) {
12722 use git2::Signature;
12723
12724 let signature = Signature::now("test", "test@zed.dev").unwrap();
12725 repo.stash_save(&signature, "N/A", None)
12726 .expect("Failed to stash");
12727}
12728
12729#[track_caller]
12730fn git_reset(offset: usize, repo: &git2::Repository) {
12731 let head = repo.head().expect("Couldn't get repo head");
12732 let object = head.peel(git2::ObjectType::Commit).unwrap();
12733 let commit = object.as_commit().unwrap();
12734 let new_head = commit
12735 .parents()
12736 .inspect(|parnet| {
12737 parnet.message();
12738 })
12739 .nth(offset)
12740 .expect("Not enough history");
12741 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
12742 .expect("Could not reset");
12743}
12744
// Create branch `name` pointing at the current HEAD commit, without checking
// it out. Compiled out via `#[cfg(any())]` until needed.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
12755
// Point HEAD at the reference `name` and check out its contents into the
// working tree. Compiled out via `#[cfg(any())]` until needed.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
12762
// Snapshot the repository's status as a path -> status-flags map. Compiled
// out via `#[cfg(any())]` until needed.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}
12772
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // `find_project_path` should resolve absolute paths to the correct
    // (worktree_id, relative path) pair, including paths to files that do not
    // exist yet, and return `None` for paths outside every worktree.
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root path and id so we can check the
    // resolved project paths against them below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at a worktree root resolves into that worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested files resolve with their full worktree-relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path inside a worktree resolves even if the file doesn't exist.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
12856
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    // Verifies repository bookkeeping as worktrees are removed: a repository
    // only disappears with the worktree that owns it, and the active
    // repository falls back to a remaining one (or to none).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Three worktrees: the repository roots `a` and `b`, plus `b/script`,
    // a subdirectory inside the `b` repository.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the `b/script` worktree must not drop the `b` repository,
    // which is owned by the `/root/b` worktree.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing `a` should switch the active repository to `b`.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // With every repository-owning worktree gone, no repository is active.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
12969
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    // Verifies the optimistic-staging lifecycle of a diff hunk's secondary
    // status: unstaged -> pending-removal while the stage operation is in
    // flight -> fully staged when it completes -> gone after "committing".
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index both hold the committed contents, so the working
    // copy's change to line two is unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // The hunk must now be in the optimistic "removal pending" state.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
13114
#[gpui::test]
async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
    // Verifies that buffers whose paths match the `read_only_files` glob
    // patterns open as read-only, while non-matching files stay writable.
    init_test(cx);

    // Configure read_only_files setting
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/generated/**".to_string(),
                    "**/*.gen.rs".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
                "types.gen.rs": "// Generated file",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Open a regular file - should be read-write
    let regular_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    regular_buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Regular file should not be read-only");
    });

    // Open a file matching *.gen.rs pattern - should be read-only
    let gen_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
        })
        .await
        .unwrap();

    gen_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File matching *.gen.rs pattern should be read-only"
        );
    });

    // Open a file in generated directory - should be read-only
    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File in generated directory should be read-only"
        );
    });
}
13190
#[gpui::test]
async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
    // Verifies that an explicitly empty `read_only_files` list makes no
    // buffer read-only, regardless of path.
    init_test(cx);

    // Explicitly set read_only_files to empty (default behavior)
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // All files should be read-write when read_only_files is empty
    let main_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    main_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Files should not be read-only when read_only_files is empty"
        );
    });

    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Generated files should not be read-only when read_only_files is empty"
        );
    });
}
13249
#[gpui::test]
async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
    // Verifies `read_only_files` patterns targeting lock files: the lock
    // files become read-only while their sibling manifests stay writable.
    init_test(cx);

    // Configure to make lock files read-only
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/*.lock".to_string(),
                    "**/package-lock.json".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "Cargo.lock": "# Lock file",
            "Cargo.toml": "[package]",
            "package-lock.json": "{}",
            "package.json": "{}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Cargo.lock should be read-only
    let cargo_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.lock"), cx)
        })
        .await
        .unwrap();

    cargo_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "Cargo.lock should be read-only");
    });

    // Cargo.toml should be read-write
    let cargo_toml = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    cargo_toml.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
    });

    // package-lock.json should be read-only
    let package_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package-lock.json"), cx)
        })
        .await
        .unwrap();

    package_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "package-lock.json should be read-only");
    });

    // package.json should be read-write
    let package_json = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package.json"), cx)
        })
        .await
        .unwrap();

    package_json.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "package.json should not be read-only");
    });
}
13328
// Tests for `DisableAiSettings`. Per the assertions below, `disable_ai`
// merges as a saturating bool: once a higher-precedence layer (global or
// user) sets it to true, a lower-precedence layer cannot turn it back off.
mod disable_ai_settings_tests {
    use gpui::TestAppContext;
    use project::*;
    use settings::{Settings, SettingsStore};

    // Exercises the global/user settings layers: `true` at either layer wins
    // over `false` at the other.
    #[gpui::test]
    async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
        cx.update(|cx| {
            settings::init(cx);

            // Test 1: Default is false (AI enabled)
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Default should allow AI"
            );
        });

        let disable_true = serde_json::json!({
            "disable_ai": true
        })
        .to_string();
        let disable_false = serde_json::json!({
            "disable_ai": false
        })
        .to_string();

        // Global true + user false => still disabled.
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_user_settings(&disable_false, cx).unwrap();
            store.set_global_settings(&disable_true, cx).unwrap();
        });
        cx.update(|cx| {
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Local false cannot override global true"
            );
        });

        // Global false + user true => also disabled.
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_global_settings(&disable_false, cx).unwrap();
            store.set_user_settings(&disable_true, cx).unwrap();
        });

        cx.update(|cx| {
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Local false cannot override global true"
            );
        });
    }

    // Exercises worktree-local (project) settings and their interaction with
    // the user layer.
    #[gpui::test]
    async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
        use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
        use worktree::WorktreeId;

        cx.update(|cx| {
            settings::init(cx);

            // Default should allow AI
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Default should allow AI"
            );
        });

        let worktree_id = WorktreeId::from_usize(1);
        let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
            std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
        };
        let project_path = rel_path("project");
        let settings_location = SettingsLocation {
            worktree_id,
            path: project_path.as_ref(),
        };

        // Test: Project-level disable_ai=true should disable AI for files in that project
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": true }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                settings.disable_ai,
                "Project-level disable_ai=true should disable AI for files in that project"
            );
            // Global should now also be true since project-level disable_ai is merged into global
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Global setting should be affected by project-level disable_ai=true"
            );
        });

        // Test: Setting project-level to false should allow AI for that project
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": false }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                !settings.disable_ai,
                "Project-level disable_ai=false should allow AI"
            );
            // Global should also be false now
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Global setting should be false when project-level is false"
            );
        });

        // Test: User-level true + project-level false = AI disabled (saturation)
        let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_user_settings(&disable_true, cx).unwrap();
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": false }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                settings.disable_ai,
                "Project-level false cannot override user-level true (SaturatingBool)"
            );
        });
    }
}