1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::FakeFs;
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
45 LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
46 ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
47 language_settings::{LanguageSettingsContent, language_settings},
48 markdown_lang, rust_lang, tree_sitter_typescript,
49};
50use lsp::{
51 CodeActionKind, DiagnosticSeverity, DocumentChanges, FileOperationFilter, LanguageServerId,
52 LanguageServerName, NumberOrString, TextDocumentEdit, Uri, WillRenameFiles,
53 notification::DidRenameFiles,
54};
55use parking_lot::Mutex;
56use paths::{config_dir, global_gitignore_path, tasks_file};
57use postage::stream::Stream as _;
58use pretty_assertions::{assert_eq, assert_matches};
59use project::{
60 Event, TaskContexts,
61 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
62 search::{SearchQuery, SearchResult},
63 task_store::{TaskSettingsLocation, TaskStore},
64 *,
65};
66use rand::{Rng as _, rngs::StdRng};
67use serde_json::json;
68use settings::SettingsStore;
69#[cfg(not(windows))]
70use std::os;
71use std::{
72 cell::RefCell,
73 env, mem,
74 num::NonZeroU32,
75 ops::Range,
76 path::{Path, PathBuf},
77 rc::Rc,
78 str::FromStr,
79 sync::{Arc, OnceLock},
80 task::Poll,
81 time::Duration,
82};
83use sum_tree::SumTree;
84use task::{ResolvedTask, ShellKind, TaskContext};
85use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
86use unindent::Unindent as _;
87use util::{
88 TryFutureExt as _, assert_set_eq, maybe, path,
89 paths::{PathMatcher, PathStyle},
90 rel_path::{RelPath, rel_path},
91 test::{TempTree, marked_text_offsets},
92 uri,
93};
94use worktree::WorktreeModelHandle as _;
95
96#[gpui::test]
97async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
98 cx.executor().allow_parking();
99
100 let (tx, mut rx) = futures::channel::mpsc::unbounded();
101 let _thread = std::thread::spawn(move || {
102 #[cfg(not(target_os = "windows"))]
103 std::fs::metadata("/tmp").unwrap();
104 #[cfg(target_os = "windows")]
105 std::fs::metadata("C:/Windows").unwrap();
106 std::thread::sleep(Duration::from_millis(1000));
107 tx.unbounded_send(1).unwrap();
108 });
109 rx.next().await.unwrap();
110}
111
112#[gpui::test]
113async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
114 cx.executor().allow_parking();
115
116 let io_task = smol::unblock(move || {
117 println!("sleeping on thread {:?}", std::thread::current().id());
118 std::thread::sleep(Duration::from_millis(10));
119 1
120 });
121
122 let task = cx.foreground_executor().spawn(async move {
123 io_task.await;
124 });
125
126 task.await;
127}
128
129// NOTE:
130// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
131// we assume that they are not supported out of the box.
132#[cfg(not(windows))]
133#[gpui::test]
134async fn test_symlinks(cx: &mut gpui::TestAppContext) {
135 init_test(cx);
136 cx.executor().allow_parking();
137
138 let dir = TempTree::new(json!({
139 "root": {
140 "apple": "",
141 "banana": {
142 "carrot": {
143 "date": "",
144 "endive": "",
145 }
146 },
147 "fennel": {
148 "grape": "",
149 }
150 }
151 }));
152
153 let root_link_path = dir.path().join("root_link");
154 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
155 os::unix::fs::symlink(
156 dir.path().join("root/fennel"),
157 dir.path().join("root/finnochio"),
158 )
159 .unwrap();
160
161 let project = Project::test(
162 Arc::new(RealFs::new(None, cx.executor())),
163 [root_link_path.as_ref()],
164 cx,
165 )
166 .await;
167
168 project.update(cx, |project, cx| {
169 let tree = project.worktrees(cx).next().unwrap().read(cx);
170 assert_eq!(tree.file_count(), 5);
171 assert_eq!(
172 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
173 tree.entry_for_path(rel_path("finnochio/grape"))
174 .unwrap()
175 .inode
176 );
177 });
178}
179
/// End-to-end `.editorconfig` support: editorconfig values override
/// `.zed/settings.json`, configs in subdirectories override the root config,
/// `tab_width` is used when `indent_size` is absent, and `off` values fall
/// back to Zed's own settings.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "d": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 1
            "#,
            "d.rs": "fn d() {\n D\n}",
        },
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs at the same path.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_d = settings_for("d/d.rs");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in subdirectory overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
        assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", fall back to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so the tab_size of 8 from .zed/settings.json applies.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
287
288#[gpui::test]
289async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
290 init_test(cx);
291
292 let fs = FakeFs::new(cx.executor());
293 fs.insert_tree(
294 path!("/grandparent"),
295 json!({
296 ".editorconfig": "[*]\nindent_size = 4\n",
297 "parent": {
298 ".editorconfig": "[*.rs]\nindent_size = 2\n",
299 "worktree": {
300 ".editorconfig": "[*.md]\nindent_size = 3\n",
301 "main.rs": "fn main() {}",
302 "README.md": "# README",
303 "other.txt": "other content",
304 }
305 }
306 }),
307 )
308 .await;
309
310 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
311
312 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
313 language_registry.add(rust_lang());
314 language_registry.add(markdown_lang());
315
316 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
317
318 cx.executor().run_until_parked();
319
320 cx.update(|cx| {
321 let tree = worktree.read(cx);
322 let settings_for = |path: &str| {
323 let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
324 let file = File::for_entry(file_entry, worktree.clone());
325 let file_language = project
326 .read(cx)
327 .languages()
328 .load_language_for_file_path(file.path.as_std_path());
329 let file_language = cx
330 .foreground_executor()
331 .block_on(file_language)
332 .expect("Failed to get file language");
333 let file = file as _;
334 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
335 };
336
337 let settings_rs = settings_for("main.rs");
338 let settings_md = settings_for("README.md");
339 let settings_txt = settings_for("other.txt");
340
341 // main.rs gets indent_size = 2 from parent's external .editorconfig
342 assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));
343
344 // README.md gets indent_size = 3 from internal worktree .editorconfig
345 assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));
346
347 // other.txt gets indent_size = 4 from grandparent's external .editorconfig
348 assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
349 });
350}
351
352#[gpui::test]
353async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
354 init_test(cx);
355
356 let fs = FakeFs::new(cx.executor());
357 fs.insert_tree(
358 path!("/worktree"),
359 json!({
360 ".editorconfig": "[*]\nindent_size = 99\n",
361 "src": {
362 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
363 "file.rs": "fn main() {}",
364 }
365 }),
366 )
367 .await;
368
369 let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;
370
371 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
372 language_registry.add(rust_lang());
373
374 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
375
376 cx.executor().run_until_parked();
377
378 cx.update(|cx| {
379 let tree = worktree.read(cx);
380 let file_entry = tree
381 .entry_for_path(rel_path("src/file.rs"))
382 .unwrap()
383 .clone();
384 let file = File::for_entry(file_entry, worktree.clone());
385 let file_language = project
386 .read(cx)
387 .languages()
388 .load_language_for_file_path(file.path.as_std_path());
389 let file_language = cx
390 .foreground_executor()
391 .block_on(file_language)
392 .expect("Failed to get file language");
393 let file = file as _;
394 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
395
396 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
397 });
398}
399
400#[gpui::test]
401async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
402 init_test(cx);
403
404 let fs = FakeFs::new(cx.executor());
405 fs.insert_tree(
406 path!("/parent"),
407 json!({
408 ".editorconfig": "[*]\nindent_size = 99\n",
409 "worktree": {
410 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
411 "file.rs": "fn main() {}",
412 }
413 }),
414 )
415 .await;
416
417 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
418
419 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
420 language_registry.add(rust_lang());
421
422 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
423
424 cx.executor().run_until_parked();
425
426 cx.update(|cx| {
427 let tree = worktree.read(cx);
428 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
429 let file = File::for_entry(file_entry, worktree.clone());
430 let file_language = project
431 .read(cx)
432 .languages()
433 .load_language_for_file_path(file.path.as_std_path());
434 let file_language = cx
435 .foreground_executor()
436 .block_on(file_language)
437 .expect("Failed to get file language");
438 let file = file as _;
439 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
440
441 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
442 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
443 });
444}
445
446#[gpui::test]
447async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
448 init_test(cx);
449
450 let fs = FakeFs::new(cx.executor());
451 fs.insert_tree(
452 path!("/grandparent"),
453 json!({
454 ".editorconfig": "[*]\nindent_size = 99\n",
455 "parent": {
456 ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
457 "worktree": {
458 "file.rs": "fn main() {}",
459 }
460 }
461 }),
462 )
463 .await;
464
465 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
466
467 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
468 language_registry.add(rust_lang());
469
470 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
471
472 cx.executor().run_until_parked();
473
474 cx.update(|cx| {
475 let tree = worktree.read(cx);
476 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
477 let file = File::for_entry(file_entry, worktree.clone());
478 let file_language = project
479 .read(cx)
480 .languages()
481 .load_language_for_file_path(file.path.as_std_path());
482 let file_language = cx
483 .foreground_executor()
484 .block_on(file_language)
485 .expect("Failed to get file language");
486 let file = file as _;
487 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
488
489 // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
490 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
491 });
492}
493
494#[gpui::test]
495async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
496 init_test(cx);
497
498 let fs = FakeFs::new(cx.executor());
499 fs.insert_tree(
500 path!("/parent"),
501 json!({
502 ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
503 "worktree_a": {
504 "file.rs": "fn a() {}",
505 ".editorconfig": "[*]\ninsert_final_newline = true\n",
506 },
507 "worktree_b": {
508 "file.rs": "fn b() {}",
509 ".editorconfig": "[*]\ninsert_final_newline = false\n",
510 }
511 }),
512 )
513 .await;
514
515 let project = Project::test(
516 fs,
517 [
518 path!("/parent/worktree_a").as_ref(),
519 path!("/parent/worktree_b").as_ref(),
520 ],
521 cx,
522 )
523 .await;
524
525 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
526 language_registry.add(rust_lang());
527
528 cx.executor().run_until_parked();
529
530 cx.update(|cx| {
531 let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
532 assert_eq!(worktrees.len(), 2);
533
534 for worktree in worktrees {
535 let tree = worktree.read(cx);
536 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
537 let file = File::for_entry(file_entry, worktree.clone());
538 let file_language = project
539 .read(cx)
540 .languages()
541 .load_language_for_file_path(file.path.as_std_path());
542 let file_language = cx
543 .foreground_executor()
544 .block_on(file_language)
545 .expect("Failed to get file language");
546 let file = file as _;
547 let settings =
548 language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
549
550 // Both worktrees should get indent_size = 5 from shared parent .editorconfig
551 assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
552 }
553 });
554}
555
556#[gpui::test]
557async fn test_external_editorconfig_not_loaded_without_internal_config(
558 cx: &mut gpui::TestAppContext,
559) {
560 init_test(cx);
561
562 let fs = FakeFs::new(cx.executor());
563 fs.insert_tree(
564 path!("/parent"),
565 json!({
566 ".editorconfig": "[*]\nindent_size = 99\n",
567 "worktree": {
568 "file.rs": "fn main() {}",
569 }
570 }),
571 )
572 .await;
573
574 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
575
576 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
577 language_registry.add(rust_lang());
578
579 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
580
581 cx.executor().run_until_parked();
582
583 cx.update(|cx| {
584 let tree = worktree.read(cx);
585 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
586 let file = File::for_entry(file_entry, worktree.clone());
587 let file_language = project
588 .read(cx)
589 .languages()
590 .load_language_for_file_path(file.path.as_std_path());
591 let file_language = cx
592 .foreground_executor()
593 .block_on(file_language)
594 .expect("Failed to get file language");
595 let file = file as _;
596 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
597
598 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
599 // because without an internal .editorconfig, external configs are not loaded
600 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
601 });
602}
603
604#[gpui::test]
605async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
606 init_test(cx);
607
608 let fs = FakeFs::new(cx.executor());
609 fs.insert_tree(
610 path!("/parent"),
611 json!({
612 ".editorconfig": "[*]\nindent_size = 4\n",
613 "worktree": {
614 ".editorconfig": "[*]\n",
615 "file.rs": "fn main() {}",
616 }
617 }),
618 )
619 .await;
620
621 let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;
622
623 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
624 language_registry.add(rust_lang());
625
626 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
627
628 cx.executor().run_until_parked();
629
630 cx.update(|cx| {
631 let tree = worktree.read(cx);
632 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
633 let file = File::for_entry(file_entry, worktree.clone());
634 let file_language = project
635 .read(cx)
636 .languages()
637 .load_language_for_file_path(file.path.as_std_path());
638 let file_language = cx
639 .foreground_executor()
640 .block_on(file_language)
641 .expect("Failed to get file language");
642 let file = file as _;
643 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
644
645 // Test initial settings: tab_size = 4 from parent's external .editorconfig
646 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
647 });
648
649 fs.atomic_write(
650 PathBuf::from(path!("/parent/.editorconfig")),
651 "[*]\nindent_size = 8\n".to_owned(),
652 )
653 .await
654 .unwrap();
655
656 cx.executor().run_until_parked();
657
658 cx.update(|cx| {
659 let tree = worktree.read(cx);
660 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
661 let file = File::for_entry(file_entry, worktree.clone());
662 let file_language = project
663 .read(cx)
664 .languages()
665 .load_language_for_file_path(file.path.as_std_path());
666 let file_language = cx
667 .foreground_executor()
668 .block_on(file_language)
669 .expect("Failed to get file language");
670 let file = file as _;
671 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
672
673 // Test settings updated: tab_size = 8
674 assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
675 });
676}
677
/// Adding a new worktree to an existing project should discover external
/// `.editorconfig` files above that worktree, so it resolves the same
/// settings as a worktree that was present from the start.
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            // Shared ancestor config; `root = true` stops any further traversal.
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                // Empty section: an internal .editorconfig is required for
                // external config discovery (see
                // test_external_editorconfig_not_loaded_without_internal_config).
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    // Start with only the existing worktree in the project.
    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktree = project.read(cx).worktrees(cx).next().unwrap();
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Add the second worktree after the project already exists.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = new_worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, new_worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
754
755#[gpui::test]
756async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
757 init_test(cx);
758
759 let fs = FakeFs::new(cx.executor());
760 fs.insert_tree(
761 path!("/parent"),
762 json!({
763 ".editorconfig": "[*]\nindent_size = 6\n",
764 "worktree": {
765 ".editorconfig": "[*]\n",
766 "file.rs": "fn main() {}",
767 }
768 }),
769 )
770 .await;
771
772 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
773
774 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
775 language_registry.add(rust_lang());
776
777 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
778 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
779
780 cx.executor().run_until_parked();
781
782 cx.update(|cx| {
783 let store = cx.global::<SettingsStore>();
784 let (worktree_ids, external_paths, watcher_paths) =
785 store.editorconfig_store.read(cx).test_state();
786
787 // Test external config is loaded
788 assert!(worktree_ids.contains(&worktree_id));
789 assert!(!external_paths.is_empty());
790 assert!(!watcher_paths.is_empty());
791 });
792
793 project.update(cx, |project, cx| {
794 project.remove_worktree(worktree_id, cx);
795 });
796
797 cx.executor().run_until_parked();
798
799 cx.update(|cx| {
800 let store = cx.global::<SettingsStore>();
801 let (worktree_ids, external_paths, watcher_paths) =
802 store.editorconfig_store.read(cx).test_state();
803
804 // Test worktree state, external configs, and watchers all removed
805 assert!(!worktree_ids.contains(&worktree_id));
806 assert!(external_paths.is_empty());
807 assert!(watcher_paths.is_empty());
808 });
809}
810
/// When two worktrees share one external `.editorconfig`, removing one of
/// them must keep the shared config (and its watcher) alive for the other,
/// and the remaining worktree's settings must stay intact.
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            // Shared ancestor config used by both worktrees below.
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    // NOTE(review): assumes `worktrees()` yields worktrees in the order they
    // were added (worktree_a first) — confirm if this ever flakes.
    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    cx.update(|cx| {
        let tree = worktree_b.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree_b.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
908
909#[gpui::test]
910async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
911 init_test(cx);
912 cx.update(|cx| {
913 GitHostingProviderRegistry::default_global(cx);
914 git_hosting_providers::init(cx);
915 });
916
917 let fs = FakeFs::new(cx.executor());
918 let str_path = path!("/dir");
919 let path = Path::new(str_path);
920
921 fs.insert_tree(
922 path!("/dir"),
923 json!({
924 ".zed": {
925 "settings.json": r#"{
926 "git_hosting_providers": [
927 {
928 "provider": "gitlab",
929 "base_url": "https://google.com",
930 "name": "foo"
931 }
932 ]
933 }"#
934 },
935 }),
936 )
937 .await;
938
939 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
940 let (_worktree, _) =
941 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
942 cx.executor().run_until_parked();
943
944 cx.update(|cx| {
945 let provider = GitHostingProviderRegistry::global(cx);
946 assert!(
947 provider
948 .list_hosting_providers()
949 .into_iter()
950 .any(|provider| provider.name() == "foo")
951 );
952 });
953
954 fs.atomic_write(
955 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
956 "{}".into(),
957 )
958 .await
959 .unwrap();
960
961 cx.run_until_parked();
962
963 cx.update(|cx| {
964 let provider = GitHostingProviderRegistry::global(cx);
965 assert!(
966 !provider
967 .list_hosting_providers()
968 .into_iter()
969 .any(|provider| provider.name() == "foo")
970 );
971 });
972}
973
974#[gpui::test]
975async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
976 init_test(cx);
977 TaskStore::init(None);
978
979 let fs = FakeFs::new(cx.executor());
980 fs.insert_tree(
981 path!("/dir"),
982 json!({
983 ".zed": {
984 "settings.json": r#"{ "tab_size": 8 }"#,
985 "tasks.json": r#"[{
986 "label": "cargo check all",
987 "command": "cargo",
988 "args": ["check", "--all"]
989 },]"#,
990 },
991 "a": {
992 "a.rs": "fn a() {\n A\n}"
993 },
994 "b": {
995 ".zed": {
996 "settings.json": r#"{ "tab_size": 2 }"#,
997 "tasks.json": r#"[{
998 "label": "cargo check",
999 "command": "cargo",
1000 "args": ["check"]
1001 },]"#,
1002 },
1003 "b.rs": "fn b() {\n B\n}"
1004 }
1005 }),
1006 )
1007 .await;
1008
1009 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1010 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1011
1012 cx.executor().run_until_parked();
1013 let worktree_id = cx.update(|cx| {
1014 project.update(cx, |project, cx| {
1015 project.worktrees(cx).next().unwrap().read(cx).id()
1016 })
1017 });
1018
1019 let mut task_contexts = TaskContexts::default();
1020 task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
1021 let task_contexts = Arc::new(task_contexts);
1022
1023 let topmost_local_task_source_kind = TaskSourceKind::Worktree {
1024 id: worktree_id,
1025 directory_in_worktree: rel_path(".zed").into(),
1026 id_base: "local worktree tasks from directory \".zed\"".into(),
1027 };
1028
1029 let all_tasks = cx
1030 .update(|cx| {
1031 let tree = worktree.read(cx);
1032
1033 let file_a = File::for_entry(
1034 tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
1035 worktree.clone(),
1036 ) as _;
1037 let settings_a = language_settings(None, Some(&file_a), cx);
1038 let file_b = File::for_entry(
1039 tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
1040 worktree.clone(),
1041 ) as _;
1042 let settings_b = language_settings(None, Some(&file_b), cx);
1043
1044 assert_eq!(settings_a.tab_size.get(), 8);
1045 assert_eq!(settings_b.tab_size.get(), 2);
1046
1047 get_all_tasks(&project, task_contexts.clone(), cx)
1048 })
1049 .await
1050 .into_iter()
1051 .map(|(source_kind, task)| {
1052 let resolved = task.resolved;
1053 (
1054 source_kind,
1055 task.resolved_label,
1056 resolved.args,
1057 resolved.env,
1058 )
1059 })
1060 .collect::<Vec<_>>();
1061 assert_eq!(
1062 all_tasks,
1063 vec![
1064 (
1065 TaskSourceKind::Worktree {
1066 id: worktree_id,
1067 directory_in_worktree: rel_path("b/.zed").into(),
1068 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1069 },
1070 "cargo check".to_string(),
1071 vec!["check".to_string()],
1072 HashMap::default(),
1073 ),
1074 (
1075 topmost_local_task_source_kind.clone(),
1076 "cargo check all".to_string(),
1077 vec!["check".to_string(), "--all".to_string()],
1078 HashMap::default(),
1079 ),
1080 ]
1081 );
1082
1083 let (_, resolved_task) = cx
1084 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1085 .await
1086 .into_iter()
1087 .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
1088 .expect("should have one global task");
1089 project.update(cx, |project, cx| {
1090 let task_inventory = project
1091 .task_store()
1092 .read(cx)
1093 .task_inventory()
1094 .cloned()
1095 .unwrap();
1096 task_inventory.update(cx, |inventory, _| {
1097 inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
1098 inventory
1099 .update_file_based_tasks(
1100 TaskSettingsLocation::Global(tasks_file()),
1101 Some(
1102 &json!([{
1103 "label": "cargo check unstable",
1104 "command": "cargo",
1105 "args": [
1106 "check",
1107 "--all",
1108 "--all-targets"
1109 ],
1110 "env": {
1111 "RUSTFLAGS": "-Zunstable-options"
1112 }
1113 }])
1114 .to_string(),
1115 ),
1116 )
1117 .unwrap();
1118 });
1119 });
1120 cx.run_until_parked();
1121
1122 let all_tasks = cx
1123 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1124 .await
1125 .into_iter()
1126 .map(|(source_kind, task)| {
1127 let resolved = task.resolved;
1128 (
1129 source_kind,
1130 task.resolved_label,
1131 resolved.args,
1132 resolved.env,
1133 )
1134 })
1135 .collect::<Vec<_>>();
1136 assert_eq!(
1137 all_tasks,
1138 vec![
1139 (
1140 topmost_local_task_source_kind.clone(),
1141 "cargo check all".to_string(),
1142 vec!["check".to_string(), "--all".to_string()],
1143 HashMap::default(),
1144 ),
1145 (
1146 TaskSourceKind::Worktree {
1147 id: worktree_id,
1148 directory_in_worktree: rel_path("b/.zed").into(),
1149 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1150 },
1151 "cargo check".to_string(),
1152 vec!["check".to_string()],
1153 HashMap::default(),
1154 ),
1155 (
1156 TaskSourceKind::AbsPath {
1157 abs_path: paths::tasks_file().clone(),
1158 id_base: "global tasks.json".into(),
1159 },
1160 "cargo check unstable".to_string(),
1161 vec![
1162 "check".to_string(),
1163 "--all".to_string(),
1164 "--all-targets".to_string(),
1165 ],
1166 HashMap::from_iter(Some((
1167 "RUSTFLAGS".to_string(),
1168 "-Zunstable-options".to_string()
1169 ))),
1170 ),
1171 ]
1172 );
1173}
1174
// Verifies that a `.zed/tasks.json` containing an unknown task variable
// produces an `Event::Toast` whose link points at the tasks documentation.
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we had a chance to setup the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Shared flag flipped by the subscription below; checked after parking.
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                // The toast must identify the offending variable and link to docs.
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    // Drain pending async work so the file change is processed and the toast fires.
    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1232
// Verifies that a task referencing $ZED_WORKTREE_ROOT only resolves when the
// task context actually supplies a worktree root: with no worktree context the
// task list is empty, and with one the variable is substituted into the command.
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // A single local task whose command depends on $ZED_WORKTREE_ROOT.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the worktree finish scanning before reading its id.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Case 1: an active item context but no active worktree context — the
    // $ZED_WORKTREE_ROOT variable cannot be filled in, so no task resolves.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Case 2: same query, but now the worktree context provides the
    // WorktreeRoot variable, so the task resolves with it substituted.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            // $ZED_WORKTREE_ROOT was replaced by the provided root.
            "echo /dir".to_string(),
        )]
    );
}
1324
// Verifies that two subprojects inside one worktree initially share a single
// language server instance, and that selecting a different toolchain for one
// subproject causes a second, separate server instance to be started for it.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: a directory "roots" a project if it (or an
    // ancestor within `depth`) contains a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Walk up at most `depth` ancestors looking for the manifest file.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // One worktree containing two Python subprojects, each with its own
    // `pyproject.toml` (manifest) and `.venv` directory.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery is rooted at project-b's manifest directory.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    // No toolchain is active until one is explicitly selected.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // Re-query the servers for project-b's buffer after toolchain activation.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1526
// End-to-end test of language server lifecycle management: server startup on
// buffer open, capability-based buffer configuration, routing of change/save/
// close notifications to the right server, re-routing when a file rename
// changes its language, and document re-opening after a server restart.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake servers with distinct completion triggers so we can tell which
    // server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1929
// Verifies how configured language-server binary paths are resolved: a
// worktree-relative path is expanded to an absolute path under the worktree
// root, while an unresolvable bare path is left untouched (so the OS can
// resolve it via the PATH environment variable).
#[gpui::test]
async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let settings_json_contents = json!({
        "languages": {
            "Rust": {
                "language_servers": ["my_fake_lsp", "lsp_on_path"]
            }
        },
        "lsp": {
            "my_fake_lsp": {
                "binary": {
                    // file exists, so this is treated as a relative path
                    "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
                }
            },
            "lsp_on_path": {
                "binary": {
                    // file doesn't exist, so it will fall back on PATH env var
                    "path": path!("lsp_on_path.exe").to_string(),
                }
            }
        },
    });

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": settings_json_contents.to_string(),
            },
            ".relative_path": {
                "to": {
                    // NOTE(review): the settings above reference
                    // `my_fake_lsp_binary.exe`, but the fixture creates
                    // `my_fake_lsp.exe` — presumably one of the two names is a
                    // typo, or path resolution does not require the exact file
                    // to exist. TODO: confirm against the binary-resolution
                    // logic in the lsp store.
                    "my_fake_lsp.exe": "",
                },
            },
            "src": {
                "main.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let mut my_fake_lsp = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "my_fake_lsp",
            ..Default::default()
        },
    );
    let mut lsp_on_path = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "lsp_on_path",
            ..Default::default()
        },
    );

    cx.run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    // The relative path is anchored at the worktree root.
    let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
    assert_eq!(
        lsp_path.to_string_lossy(),
        path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
    );

    // The bare path is passed through unchanged for PATH-based lookup.
    let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
    assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
}
2013
2014#[gpui::test]
2015async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2016 init_test(cx);
2017
2018 let settings_json_contents = json!({
2019 "languages": {
2020 "Rust": {
2021 "language_servers": ["tilde_lsp"]
2022 }
2023 },
2024 "lsp": {
2025 "tilde_lsp": {
2026 "binary": {
2027 "path": "~/.local/bin/rust-analyzer",
2028 }
2029 }
2030 },
2031 });
2032
2033 let fs = FakeFs::new(cx.executor());
2034 fs.insert_tree(
2035 path!("/root"),
2036 json!({
2037 ".zed": {
2038 "settings.json": settings_json_contents.to_string(),
2039 },
2040 "src": {
2041 "main.rs": "fn main() {}",
2042 }
2043 }),
2044 )
2045 .await;
2046
2047 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2048 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2049 language_registry.add(rust_lang());
2050
2051 let mut tilde_lsp = language_registry.register_fake_lsp(
2052 "Rust",
2053 FakeLspAdapter {
2054 name: "tilde_lsp",
2055 ..Default::default()
2056 },
2057 );
2058 cx.run_until_parked();
2059
2060 project
2061 .update(cx, |project, cx| {
2062 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2063 })
2064 .await
2065 .unwrap();
2066
2067 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2068 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2069 assert_eq!(
2070 lsp_path, expected_path,
2071 "Tilde path should expand to home directory"
2072 );
2073}
2074
2075#[gpui::test]
2076async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2077 init_test(cx);
2078
2079 let fs = FakeFs::new(cx.executor());
2080 fs.insert_tree(
2081 path!("/the-root"),
2082 json!({
2083 ".gitignore": "target\n",
2084 "Cargo.lock": "",
2085 "src": {
2086 "a.rs": "",
2087 "b.rs": "",
2088 },
2089 "target": {
2090 "x": {
2091 "out": {
2092 "x.rs": ""
2093 }
2094 },
2095 "y": {
2096 "out": {
2097 "y.rs": "",
2098 }
2099 },
2100 "z": {
2101 "out": {
2102 "z.rs": ""
2103 }
2104 }
2105 }
2106 }),
2107 )
2108 .await;
2109 fs.insert_tree(
2110 path!("/the-registry"),
2111 json!({
2112 "dep1": {
2113 "src": {
2114 "dep1.rs": "",
2115 }
2116 },
2117 "dep2": {
2118 "src": {
2119 "dep2.rs": "",
2120 }
2121 },
2122 }),
2123 )
2124 .await;
2125 fs.insert_tree(
2126 path!("/the/stdlib"),
2127 json!({
2128 "LICENSE": "",
2129 "src": {
2130 "string.rs": "",
2131 }
2132 }),
2133 )
2134 .await;
2135
2136 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2137 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2138 (project.languages().clone(), project.lsp_store())
2139 });
2140 language_registry.add(rust_lang());
2141 let mut fake_servers = language_registry.register_fake_lsp(
2142 "Rust",
2143 FakeLspAdapter {
2144 name: "the-language-server",
2145 ..Default::default()
2146 },
2147 );
2148
2149 cx.executor().run_until_parked();
2150
2151 // Start the language server by opening a buffer with a compatible file extension.
2152 project
2153 .update(cx, |project, cx| {
2154 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2155 })
2156 .await
2157 .unwrap();
2158
2159 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2160 project.update(cx, |project, cx| {
2161 let worktree = project.worktrees(cx).next().unwrap();
2162 assert_eq!(
2163 worktree
2164 .read(cx)
2165 .snapshot()
2166 .entries(true, 0)
2167 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2168 .collect::<Vec<_>>(),
2169 &[
2170 ("", false),
2171 (".gitignore", false),
2172 ("Cargo.lock", false),
2173 ("src", false),
2174 ("src/a.rs", false),
2175 ("src/b.rs", false),
2176 ("target", true),
2177 ]
2178 );
2179 });
2180
2181 let prev_read_dir_count = fs.read_dir_call_count();
2182
2183 let fake_server = fake_servers.next().await.unwrap();
2184 cx.executor().run_until_parked();
2185 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2186 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2187 id
2188 });
2189
2190 // Simulate jumping to a definition in a dependency outside of the worktree.
2191 let _out_of_worktree_buffer = project
2192 .update(cx, |project, cx| {
2193 project.open_local_buffer_via_lsp(
2194 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2195 server_id,
2196 cx,
2197 )
2198 })
2199 .await
2200 .unwrap();
2201
2202 // Keep track of the FS events reported to the language server.
2203 let file_changes = Arc::new(Mutex::new(Vec::new()));
2204 fake_server
2205 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
2206 registrations: vec![lsp::Registration {
2207 id: Default::default(),
2208 method: "workspace/didChangeWatchedFiles".to_string(),
2209 register_options: serde_json::to_value(
2210 lsp::DidChangeWatchedFilesRegistrationOptions {
2211 watchers: vec![
2212 lsp::FileSystemWatcher {
2213 glob_pattern: lsp::GlobPattern::String(
2214 path!("/the-root/Cargo.toml").to_string(),
2215 ),
2216 kind: None,
2217 },
2218 lsp::FileSystemWatcher {
2219 glob_pattern: lsp::GlobPattern::String(
2220 path!("/the-root/src/*.{rs,c}").to_string(),
2221 ),
2222 kind: None,
2223 },
2224 lsp::FileSystemWatcher {
2225 glob_pattern: lsp::GlobPattern::String(
2226 path!("/the-root/target/y/**/*.rs").to_string(),
2227 ),
2228 kind: None,
2229 },
2230 lsp::FileSystemWatcher {
2231 glob_pattern: lsp::GlobPattern::String(
2232 path!("/the/stdlib/src/**/*.rs").to_string(),
2233 ),
2234 kind: None,
2235 },
2236 lsp::FileSystemWatcher {
2237 glob_pattern: lsp::GlobPattern::String(
2238 path!("**/Cargo.lock").to_string(),
2239 ),
2240 kind: None,
2241 },
2242 ],
2243 },
2244 )
2245 .ok(),
2246 }],
2247 })
2248 .await
2249 .into_response()
2250 .unwrap();
2251 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2252 let file_changes = file_changes.clone();
2253 move |params, _| {
2254 let mut file_changes = file_changes.lock();
2255 file_changes.extend(params.changes);
2256 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2257 }
2258 });
2259
2260 cx.executor().run_until_parked();
2261 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2262 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2263
2264 let mut new_watched_paths = fs.watched_paths();
2265 new_watched_paths.retain(|path| {
2266 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2267 });
2268 assert_eq!(
2269 &new_watched_paths,
2270 &[
2271 Path::new(path!("/the-root")),
2272 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2273 Path::new(path!("/the/stdlib/src"))
2274 ]
2275 );
2276
2277 // Now the language server has asked us to watch an ignored directory path,
2278 // so we recursively load it.
2279 project.update(cx, |project, cx| {
2280 let worktree = project.visible_worktrees(cx).next().unwrap();
2281 assert_eq!(
2282 worktree
2283 .read(cx)
2284 .snapshot()
2285 .entries(true, 0)
2286 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2287 .collect::<Vec<_>>(),
2288 &[
2289 ("", false),
2290 (".gitignore", false),
2291 ("Cargo.lock", false),
2292 ("src", false),
2293 ("src/a.rs", false),
2294 ("src/b.rs", false),
2295 ("target", true),
2296 ("target/x", true),
2297 ("target/y", true),
2298 ("target/y/out", true),
2299 ("target/y/out/y.rs", true),
2300 ("target/z", true),
2301 ]
2302 );
2303 });
2304
2305 // Perform some file system mutations, two of which match the watched patterns,
2306 // and one of which does not.
2307 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2308 .await
2309 .unwrap();
2310 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2311 .await
2312 .unwrap();
2313 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2314 .await
2315 .unwrap();
2316 fs.create_file(
2317 path!("/the-root/target/x/out/x2.rs").as_ref(),
2318 Default::default(),
2319 )
2320 .await
2321 .unwrap();
2322 fs.create_file(
2323 path!("/the-root/target/y/out/y2.rs").as_ref(),
2324 Default::default(),
2325 )
2326 .await
2327 .unwrap();
2328 fs.save(
2329 path!("/the-root/Cargo.lock").as_ref(),
2330 &"".into(),
2331 Default::default(),
2332 )
2333 .await
2334 .unwrap();
2335 fs.save(
2336 path!("/the-stdlib/LICENSE").as_ref(),
2337 &"".into(),
2338 Default::default(),
2339 )
2340 .await
2341 .unwrap();
2342 fs.save(
2343 path!("/the/stdlib/src/string.rs").as_ref(),
2344 &"".into(),
2345 Default::default(),
2346 )
2347 .await
2348 .unwrap();
2349
2350 // The language server receives events for the FS mutations that match its watch patterns.
2351 cx.executor().run_until_parked();
2352 assert_eq!(
2353 &*file_changes.lock(),
2354 &[
2355 lsp::FileEvent {
2356 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2357 typ: lsp::FileChangeType::CHANGED,
2358 },
2359 lsp::FileEvent {
2360 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2361 typ: lsp::FileChangeType::DELETED,
2362 },
2363 lsp::FileEvent {
2364 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2365 typ: lsp::FileChangeType::CREATED,
2366 },
2367 lsp::FileEvent {
2368 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2369 typ: lsp::FileChangeType::CREATED,
2370 },
2371 lsp::FileEvent {
2372 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2373 typ: lsp::FileChangeType::CHANGED,
2374 },
2375 ]
2376 );
2377}
2378
// Each file is opened as its own single-file worktree; diagnostics pushed
// for one file's URI must land only in that file's buffer, with the
// severity the server reported.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open the project with two single-file worktrees, one per file.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Push one diagnostic per file — an ERROR for `a.rs` and a WARNING for
    // `b.rs` — as if a language server had published them.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
    });

    // Buffer `a` carries only the ERROR on its own variable...
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    // ...and buffer `b` carries only the WARNING.
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
2484
2485#[gpui::test]
2486async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2487 init_test(cx);
2488
2489 let fs = FakeFs::new(cx.executor());
2490 fs.insert_tree(
2491 path!("/root"),
2492 json!({
2493 "dir": {
2494 ".git": {
2495 "HEAD": "ref: refs/heads/main",
2496 },
2497 ".gitignore": "b.rs",
2498 "a.rs": "let a = 1;",
2499 "b.rs": "let b = 2;",
2500 },
2501 "other.rs": "let b = c;"
2502 }),
2503 )
2504 .await;
2505
2506 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2507 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2508 let (worktree, _) = project
2509 .update(cx, |project, cx| {
2510 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2511 })
2512 .await
2513 .unwrap();
2514 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2515
2516 let (worktree, _) = project
2517 .update(cx, |project, cx| {
2518 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2519 })
2520 .await
2521 .unwrap();
2522 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2523
2524 let server_id = LanguageServerId(0);
2525 lsp_store.update(cx, |lsp_store, cx| {
2526 lsp_store
2527 .update_diagnostics(
2528 server_id,
2529 lsp::PublishDiagnosticsParams {
2530 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2531 version: None,
2532 diagnostics: vec![lsp::Diagnostic {
2533 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2534 severity: Some(lsp::DiagnosticSeverity::ERROR),
2535 message: "unused variable 'b'".to_string(),
2536 ..Default::default()
2537 }],
2538 },
2539 None,
2540 DiagnosticSourceKind::Pushed,
2541 &[],
2542 cx,
2543 )
2544 .unwrap();
2545 lsp_store
2546 .update_diagnostics(
2547 server_id,
2548 lsp::PublishDiagnosticsParams {
2549 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2550 version: None,
2551 diagnostics: vec![lsp::Diagnostic {
2552 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2553 severity: Some(lsp::DiagnosticSeverity::ERROR),
2554 message: "unknown variable 'c'".to_string(),
2555 ..Default::default()
2556 }],
2557 },
2558 None,
2559 DiagnosticSourceKind::Pushed,
2560 &[],
2561 cx,
2562 )
2563 .unwrap();
2564 });
2565
2566 let main_ignored_buffer = project
2567 .update(cx, |project, cx| {
2568 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2569 })
2570 .await
2571 .unwrap();
2572 main_ignored_buffer.update(cx, |buffer, _| {
2573 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2574 assert_eq!(
2575 chunks
2576 .iter()
2577 .map(|(s, d)| (s.as_str(), *d))
2578 .collect::<Vec<_>>(),
2579 &[
2580 ("let ", None),
2581 ("b", Some(DiagnosticSeverity::ERROR)),
2582 (" = 2;", None),
2583 ],
2584 "Gigitnored buffers should still get in-buffer diagnostics",
2585 );
2586 });
2587 let other_buffer = project
2588 .update(cx, |project, cx| {
2589 project.open_buffer((other_worktree_id, rel_path("")), cx)
2590 })
2591 .await
2592 .unwrap();
2593 other_buffer.update(cx, |buffer, _| {
2594 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2595 assert_eq!(
2596 chunks
2597 .iter()
2598 .map(|(s, d)| (s.as_str(), *d))
2599 .collect::<Vec<_>>(),
2600 &[
2601 ("let b = ", None),
2602 ("c", Some(DiagnosticSeverity::ERROR)),
2603 (";", None),
2604 ],
2605 "Buffers from hidden projects should still get in-buffer diagnostics"
2606 );
2607 });
2608
2609 project.update(cx, |project, cx| {
2610 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2611 assert_eq!(
2612 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2613 vec![(
2614 ProjectPath {
2615 worktree_id: main_worktree_id,
2616 path: rel_path("b.rs").into(),
2617 },
2618 server_id,
2619 DiagnosticSummary {
2620 error_count: 1,
2621 warning_count: 0,
2622 }
2623 )]
2624 );
2625 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2626 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2627 });
2628}
2629
// Exercises the disk-based diagnostics lifecycle: progress notifications on
// the adapter's `disk_based_diagnostics_progress_token` should produce
// DiskBasedDiagnosticsStarted/Finished project events around the diagnostic
// updates, and publishing empty diagnostics twice in a row should only emit
// one DiagnosticsUpdated event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Subscribe before driving the server so no events are missed.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress on the disk-based token signals "diagnosing".
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending progress on the same token completes the cycle.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is materialized in the buffer snapshot.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Second identical empty publish: no further event should arrive.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2765
// Restarting a language server while its disk-based diagnostics run is still
// in flight must not wedge the project in the "diagnosing" state: the
// replacement server's progress start/end drives a fresh Started/Finished
// cycle, and the old server's never-ended progress is discarded.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed and the new one (id 1) is added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2867
// Restarting a language server clears the diagnostics the previous instance
// had published — both in the buffer and in the project-level summary.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is visible in the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
2948
2949#[gpui::test]
2950async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2951 init_test(cx);
2952
2953 let fs = FakeFs::new(cx.executor());
2954 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2955
2956 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2957 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2958
2959 language_registry.add(rust_lang());
2960 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2961
2962 let (buffer, _handle) = project
2963 .update(cx, |project, cx| {
2964 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2965 })
2966 .await
2967 .unwrap();
2968
2969 // Before restarting the server, report diagnostics with an unknown buffer version.
2970 let fake_server = fake_servers.next().await.unwrap();
2971 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2972 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2973 version: Some(10000),
2974 diagnostics: Vec::new(),
2975 });
2976 cx.executor().run_until_parked();
2977 project.update(cx, |project, cx| {
2978 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2979 });
2980
2981 let mut fake_server = fake_servers.next().await.unwrap();
2982 let notification = fake_server
2983 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2984 .await
2985 .text_document;
2986 assert_eq!(notification.version, 0);
2987}
2988
// Two work-done progress tokens are started, one marked cancellable and one
// not; cancelling language-server work for the buffer should produce a
// WorkDoneProgressCancel notification carrying the cancellable token.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First token: explicitly NOT cancellable.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second token: cancellable — this is the one a cancel should target.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // The server receives a cancel for the cancellable token.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3058
// Toggling the per-language `enable_language_server` setting should stop or
// start only that language's server, leaving servers for other languages
// untouched.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // One fake server per language so start/stop can be observed separately.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server receives an Exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance re-opens the Rust buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...and the JavaScript server shuts down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3176
3177#[gpui::test(iterations = 3)]
3178async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3179 init_test(cx);
3180
3181 let text = "
3182 fn a() { A }
3183 fn b() { BB }
3184 fn c() { CCC }
3185 "
3186 .unindent();
3187
3188 let fs = FakeFs::new(cx.executor());
3189 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3190
3191 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3192 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3193
3194 language_registry.add(rust_lang());
3195 let mut fake_servers = language_registry.register_fake_lsp(
3196 "Rust",
3197 FakeLspAdapter {
3198 disk_based_diagnostics_sources: vec!["disk".into()],
3199 ..Default::default()
3200 },
3201 );
3202
3203 let buffer = project
3204 .update(cx, |project, cx| {
3205 project.open_local_buffer(path!("/dir/a.rs"), cx)
3206 })
3207 .await
3208 .unwrap();
3209
3210 let _handle = project.update(cx, |project, cx| {
3211 project.register_buffer_with_language_servers(&buffer, cx)
3212 });
3213
3214 let mut fake_server = fake_servers.next().await.unwrap();
3215 let open_notification = fake_server
3216 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3217 .await;
3218
3219 // Edit the buffer, moving the content down
3220 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3221 let change_notification_1 = fake_server
3222 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3223 .await;
3224 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3225
3226 // Report some diagnostics for the initial version of the buffer
3227 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3228 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3229 version: Some(open_notification.text_document.version),
3230 diagnostics: vec![
3231 lsp::Diagnostic {
3232 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3233 severity: Some(DiagnosticSeverity::ERROR),
3234 message: "undefined variable 'A'".to_string(),
3235 source: Some("disk".to_string()),
3236 ..Default::default()
3237 },
3238 lsp::Diagnostic {
3239 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3240 severity: Some(DiagnosticSeverity::ERROR),
3241 message: "undefined variable 'BB'".to_string(),
3242 source: Some("disk".to_string()),
3243 ..Default::default()
3244 },
3245 lsp::Diagnostic {
3246 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3247 severity: Some(DiagnosticSeverity::ERROR),
3248 source: Some("disk".to_string()),
3249 message: "undefined variable 'CCC'".to_string(),
3250 ..Default::default()
3251 },
3252 ],
3253 });
3254
3255 // The diagnostics have moved down since they were created.
3256 cx.executor().run_until_parked();
3257 buffer.update(cx, |buffer, _| {
3258 assert_eq!(
3259 buffer
3260 .snapshot()
3261 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3262 .collect::<Vec<_>>(),
3263 &[
3264 DiagnosticEntry {
3265 range: Point::new(3, 9)..Point::new(3, 11),
3266 diagnostic: Diagnostic {
3267 source: Some("disk".into()),
3268 severity: DiagnosticSeverity::ERROR,
3269 message: "undefined variable 'BB'".to_string(),
3270 is_disk_based: true,
3271 group_id: 1,
3272 is_primary: true,
3273 source_kind: DiagnosticSourceKind::Pushed,
3274 ..Diagnostic::default()
3275 },
3276 },
3277 DiagnosticEntry {
3278 range: Point::new(4, 9)..Point::new(4, 12),
3279 diagnostic: Diagnostic {
3280 source: Some("disk".into()),
3281 severity: DiagnosticSeverity::ERROR,
3282 message: "undefined variable 'CCC'".to_string(),
3283 is_disk_based: true,
3284 group_id: 2,
3285 is_primary: true,
3286 source_kind: DiagnosticSourceKind::Pushed,
3287 ..Diagnostic::default()
3288 }
3289 }
3290 ]
3291 );
3292 assert_eq!(
3293 chunks_with_diagnostics(buffer, 0..buffer.len()),
3294 [
3295 ("\n\nfn a() { ".to_string(), None),
3296 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3297 (" }\nfn b() { ".to_string(), None),
3298 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3299 (" }\nfn c() { ".to_string(), None),
3300 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3301 (" }\n".to_string(), None),
3302 ]
3303 );
3304 assert_eq!(
3305 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3306 [
3307 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3308 (" }\nfn c() { ".to_string(), None),
3309 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3310 ]
3311 );
3312 });
3313
3314 // Ensure overlapping diagnostics are highlighted correctly.
3315 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3316 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3317 version: Some(open_notification.text_document.version),
3318 diagnostics: vec![
3319 lsp::Diagnostic {
3320 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3321 severity: Some(DiagnosticSeverity::ERROR),
3322 message: "undefined variable 'A'".to_string(),
3323 source: Some("disk".to_string()),
3324 ..Default::default()
3325 },
3326 lsp::Diagnostic {
3327 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3328 severity: Some(DiagnosticSeverity::WARNING),
3329 message: "unreachable statement".to_string(),
3330 source: Some("disk".to_string()),
3331 ..Default::default()
3332 },
3333 ],
3334 });
3335
3336 cx.executor().run_until_parked();
3337 buffer.update(cx, |buffer, _| {
3338 assert_eq!(
3339 buffer
3340 .snapshot()
3341 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3342 .collect::<Vec<_>>(),
3343 &[
3344 DiagnosticEntry {
3345 range: Point::new(2, 9)..Point::new(2, 12),
3346 diagnostic: Diagnostic {
3347 source: Some("disk".into()),
3348 severity: DiagnosticSeverity::WARNING,
3349 message: "unreachable statement".to_string(),
3350 is_disk_based: true,
3351 group_id: 4,
3352 is_primary: true,
3353 source_kind: DiagnosticSourceKind::Pushed,
3354 ..Diagnostic::default()
3355 }
3356 },
3357 DiagnosticEntry {
3358 range: Point::new(2, 9)..Point::new(2, 10),
3359 diagnostic: Diagnostic {
3360 source: Some("disk".into()),
3361 severity: DiagnosticSeverity::ERROR,
3362 message: "undefined variable 'A'".to_string(),
3363 is_disk_based: true,
3364 group_id: 3,
3365 is_primary: true,
3366 source_kind: DiagnosticSourceKind::Pushed,
3367 ..Diagnostic::default()
3368 },
3369 }
3370 ]
3371 );
3372 assert_eq!(
3373 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3374 [
3375 ("fn a() { ".to_string(), None),
3376 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3377 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3378 ("\n".to_string(), None),
3379 ]
3380 );
3381 assert_eq!(
3382 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3383 [
3384 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3385 ("\n".to_string(), None),
3386 ]
3387 );
3388 });
3389
3390 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3391 // changes since the last save.
3392 buffer.update(cx, |buffer, cx| {
3393 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3394 buffer.edit(
3395 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3396 None,
3397 cx,
3398 );
3399 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3400 });
3401 let change_notification_2 = fake_server
3402 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3403 .await;
3404 assert!(
3405 change_notification_2.text_document.version > change_notification_1.text_document.version
3406 );
3407
3408 // Handle out-of-order diagnostics
3409 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3410 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3411 version: Some(change_notification_2.text_document.version),
3412 diagnostics: vec![
3413 lsp::Diagnostic {
3414 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3415 severity: Some(DiagnosticSeverity::ERROR),
3416 message: "undefined variable 'BB'".to_string(),
3417 source: Some("disk".to_string()),
3418 ..Default::default()
3419 },
3420 lsp::Diagnostic {
3421 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3422 severity: Some(DiagnosticSeverity::WARNING),
3423 message: "undefined variable 'A'".to_string(),
3424 source: Some("disk".to_string()),
3425 ..Default::default()
3426 },
3427 ],
3428 });
3429
3430 cx.executor().run_until_parked();
3431 buffer.update(cx, |buffer, _| {
3432 assert_eq!(
3433 buffer
3434 .snapshot()
3435 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3436 .collect::<Vec<_>>(),
3437 &[
3438 DiagnosticEntry {
3439 range: Point::new(2, 21)..Point::new(2, 22),
3440 diagnostic: Diagnostic {
3441 source: Some("disk".into()),
3442 severity: DiagnosticSeverity::WARNING,
3443 message: "undefined variable 'A'".to_string(),
3444 is_disk_based: true,
3445 group_id: 6,
3446 is_primary: true,
3447 source_kind: DiagnosticSourceKind::Pushed,
3448 ..Diagnostic::default()
3449 }
3450 },
3451 DiagnosticEntry {
3452 range: Point::new(3, 9)..Point::new(3, 14),
3453 diagnostic: Diagnostic {
3454 source: Some("disk".into()),
3455 severity: DiagnosticSeverity::ERROR,
3456 message: "undefined variable 'BB'".to_string(),
3457 is_disk_based: true,
3458 group_id: 5,
3459 is_primary: true,
3460 source_kind: DiagnosticSourceKind::Pushed,
3461 ..Diagnostic::default()
3462 },
3463 }
3464 ]
3465 );
3466 });
3467}
3468
// Zero-width diagnostic ranges cannot be rendered directly, so the buffer
// widens them to cover at least one character. This test pins the widening
// rules asserted below.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Push two empty-range diagnostics straight into the LSP store: one in
    // the middle of line 0 (before the ";"), one at the very end of line 1.
    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from(path!("/dir/a.rs")),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
3546
3547#[gpui::test]
3548async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3549 init_test(cx);
3550
3551 let fs = FakeFs::new(cx.executor());
3552 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3553 .await;
3554
3555 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3556 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3557
3558 lsp_store.update(cx, |lsp_store, cx| {
3559 lsp_store
3560 .update_diagnostic_entries(
3561 LanguageServerId(0),
3562 Path::new(path!("/dir/a.rs")).to_owned(),
3563 None,
3564 None,
3565 vec![DiagnosticEntry {
3566 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3567 diagnostic: Diagnostic {
3568 severity: DiagnosticSeverity::ERROR,
3569 is_primary: true,
3570 message: "syntax error a1".to_string(),
3571 source_kind: DiagnosticSourceKind::Pushed,
3572 ..Diagnostic::default()
3573 },
3574 }],
3575 cx,
3576 )
3577 .unwrap();
3578 lsp_store
3579 .update_diagnostic_entries(
3580 LanguageServerId(1),
3581 Path::new(path!("/dir/a.rs")).to_owned(),
3582 None,
3583 None,
3584 vec![DiagnosticEntry {
3585 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3586 diagnostic: Diagnostic {
3587 severity: DiagnosticSeverity::ERROR,
3588 is_primary: true,
3589 message: "syntax error b1".to_string(),
3590 source_kind: DiagnosticSourceKind::Pushed,
3591 ..Diagnostic::default()
3592 },
3593 }],
3594 cx,
3595 )
3596 .unwrap();
3597
3598 assert_eq!(
3599 lsp_store.diagnostic_summary(false, cx),
3600 DiagnosticSummary {
3601 error_count: 2,
3602 warning_count: 0,
3603 }
3604 );
3605 });
3606}
3607
// Edits computed by a language server against an older document version must
// be translated through the local edits made since that version before they
// can be applied to the current buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the LSP edits
    // below are issued against this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Interpret LSP edits expressed against `lsp_document_version`, i.e.
    // against coordinates that predate the local edits above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits lands them in the right places even
    // though the buffer has changed since the server computed them.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3762
// A language server may express a small logical change as a sprawling diff of
// the whole file. `edits_from_lsp` should collapse such input into a minimal
// set of edits instead of rewriting everything.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large diff above collapses to exactly two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3873
// Some servers send an insertion *after* a replacement at the same position,
// which violates the LSP spec's ordering rules; both edits should still be
// applied sensibly.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replacement: "Path" -> "Path" (a no-op rewrite of the identifier).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    // Insertion at the same start position, listed second.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The import ends up above the original text, which is preserved intact.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3929
// Servers sometimes send edits out of order, with inverted ranges, or with
// positions past the end of the document. `edits_from_lsp` should normalize
// all of these (ordering, un-inverting, clipping) instead of failing.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) lies beyond the end of the buffer.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the computed edits come out minimal,
        // ordered, and clipped to the buffer contents.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4036
4037fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4038 buffer: &Buffer,
4039 range: Range<T>,
4040) -> Vec<(String, Option<DiagnosticSeverity>)> {
4041 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4042 for chunk in buffer.snapshot().chunks(range, true) {
4043 if chunks
4044 .last()
4045 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4046 {
4047 chunks.last_mut().unwrap().0.push_str(chunk.text);
4048 } else {
4049 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4050 }
4051 }
4052 chunks
4053}
4054
// Going to a definition located outside the project should open the target
// file in an extra, *invisible* worktree, and that worktree should be
// released once the last handle to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` sits outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server resolves the definition to a location in `a.rs`,
    // i.e. outside the project's only worktree.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started. (`try_next` returns `Err` when
    // the stream is empty but still open — no second fake server appeared.)
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The definition target now appears as an extra, invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // After dropping the last handle to the definition, the invisible
    // worktree that was holding `a.rs` is gone again.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path together with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4155
// When a completion item carries its own `text_edit`, that edit's range and
// new text take precedence over both `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Serve the pending completion request; the trailing `.next().await`
    // waits until the handler has answered it once.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The completion's text and replace range come from the item's text_edit,
    // not from its label or insert_text.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4239
// Completion items may omit a per-item `text_edit` and instead rely on the
// list-level `itemDefaults.editRange`. In that case the new text comes from
// `text_edit_text` when present, otherwise from the item's label.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` supplies the new text; the range comes from the
        // list-level default edit_range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit_range present, the label wins over insert_text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4377
// When neither the item nor the list defaults specify an edit range, the
// replace range is inferred from the word adjacent to the cursor, and the new
// text falls back from `insert_text` to the item's label.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // insert_text is used, and the replace range covers the "fqn" word
    // preceding the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label supplies the text, and the range covers the "cmp" word
    // before the cursor (which sits inside the string, before the quote).
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4484
// Completion insert texts containing carriage returns (`\r` or `\r\n`) must
// be normalized to plain `\n` before being offered as the new text.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Fake TypeScript server advertising completion support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with an insert_text that mixes a bare `\r` and a `\r\n`.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both CR variants were converted to LF.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4553
// A code action may carry no edits and instead be backed by a server-side
// command: resolving the action attaches the command, executing it makes the
// server push edits back via `workspace/applyEdit`, and those edits end up
// in the returned, undoable project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Fake TypeScript server supporting lazily-resolved code actions and a
    // single executable command.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first (command-backed) action.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Insert "X" at the very start of a.ts via workspace/applyEdit.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
4696
4697#[gpui::test]
4698async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
4699 init_test(cx);
4700 let fs = FakeFs::new(cx.background_executor.clone());
4701 let expected_contents = "content";
4702 fs.as_fake()
4703 .insert_tree(
4704 "/root",
4705 json!({
4706 "test.txt": expected_contents
4707 }),
4708 )
4709 .await;
4710
4711 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
4712
4713 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
4714 let worktree = project.worktrees(cx).next().unwrap();
4715 let entry_id = worktree
4716 .read(cx)
4717 .entry_for_path(rel_path("test.txt"))
4718 .unwrap()
4719 .id;
4720 (worktree, entry_id)
4721 });
4722 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4723 let _result = project
4724 .update(cx, |project, cx| {
4725 project.rename_entry(
4726 entry_id,
4727 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
4728 cx,
4729 )
4730 })
4731 .await
4732 .unwrap();
4733 worktree.read_with(cx, |worktree, _| {
4734 assert!(
4735 worktree.entry_for_path(rel_path("test.txt")).is_none(),
4736 "Old file should have been removed"
4737 );
4738 assert!(
4739 worktree
4740 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4741 .is_some(),
4742 "Whole directory hierarchy and the new file should have been created"
4743 );
4744 });
4745 assert_eq!(
4746 worktree
4747 .update(cx, |worktree, cx| {
4748 worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
4749 })
4750 .await
4751 .unwrap()
4752 .text,
4753 expected_contents,
4754 "Moved file's contents should be preserved"
4755 );
4756
4757 let entry_id = worktree.read_with(cx, |worktree, _| {
4758 worktree
4759 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4760 .unwrap()
4761 .id
4762 });
4763
4764 let _result = project
4765 .update(cx, |project, cx| {
4766 project.rename_entry(
4767 entry_id,
4768 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
4769 cx,
4770 )
4771 })
4772 .await
4773 .unwrap();
4774 worktree.read_with(cx, |worktree, _| {
4775 assert!(
4776 worktree.entry_for_path(rel_path("test.txt")).is_none(),
4777 "First file should not reappear"
4778 );
4779 assert!(
4780 worktree
4781 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4782 .is_none(),
4783 "Old file should have been removed"
4784 );
4785 assert!(
4786 worktree
4787 .entry_for_path(rel_path("dir1/dir2/test.txt"))
4788 .is_some(),
4789 "No error should have occurred after moving into existing directory"
4790 );
4791 });
4792 assert_eq!(
4793 worktree
4794 .update(cx, |worktree, cx| {
4795 worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
4796 })
4797 .await
4798 .unwrap()
4799 .text,
4800 expected_contents,
4801 "Moved file's contents should be preserved"
4802 );
4803}
4804
4805#[gpui::test(iterations = 10)]
4806async fn test_save_file(cx: &mut gpui::TestAppContext) {
4807 init_test(cx);
4808
4809 let fs = FakeFs::new(cx.executor());
4810 fs.insert_tree(
4811 path!("/dir"),
4812 json!({
4813 "file1": "the old contents",
4814 }),
4815 )
4816 .await;
4817
4818 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4819 let buffer = project
4820 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4821 .await
4822 .unwrap();
4823 buffer.update(cx, |buffer, cx| {
4824 assert_eq!(buffer.text(), "the old contents");
4825 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4826 });
4827
4828 project
4829 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4830 .await
4831 .unwrap();
4832
4833 let new_text = fs
4834 .load(Path::new(path!("/dir/file1")))
4835 .await
4836 .unwrap()
4837 .replace("\r\n", "\n");
4838 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4839}
4840
// Saving a previously-untitled buffer under a `.rs` path should start the
// Rust language server and open the new file in it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register a fake Rust server. Nothing in the project is a Rust file yet,
    // so it is not expected to start at this point.
    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // The new untitled buffer is registered, but no language server matches
    // it yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as `file.rs`, giving it a Rust file association.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer now reports an attached language server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4920
4921#[gpui::test(iterations = 30)]
4922async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4923 init_test(cx);
4924
4925 let fs = FakeFs::new(cx.executor());
4926 fs.insert_tree(
4927 path!("/dir"),
4928 json!({
4929 "file1": "the original contents",
4930 }),
4931 )
4932 .await;
4933
4934 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4935 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4936 let buffer = project
4937 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4938 .await
4939 .unwrap();
4940
4941 // Change the buffer's file on disk, and then wait for the file change
4942 // to be detected by the worktree, so that the buffer starts reloading.
4943 fs.save(
4944 path!("/dir/file1").as_ref(),
4945 &"the first contents".into(),
4946 Default::default(),
4947 )
4948 .await
4949 .unwrap();
4950 worktree.next_event(cx).await;
4951
4952 // Change the buffer's file again. Depending on the random seed, the
4953 // previous file change may still be in progress.
4954 fs.save(
4955 path!("/dir/file1").as_ref(),
4956 &"the second contents".into(),
4957 Default::default(),
4958 )
4959 .await
4960 .unwrap();
4961 worktree.next_event(cx).await;
4962
4963 cx.executor().run_until_parked();
4964 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4965 buffer.read_with(cx, |buffer, _| {
4966 assert_eq!(buffer.text(), on_disk_text);
4967 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4968 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4969 });
4970}
4971
4972#[gpui::test(iterations = 30)]
4973async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4974 init_test(cx);
4975
4976 let fs = FakeFs::new(cx.executor());
4977 fs.insert_tree(
4978 path!("/dir"),
4979 json!({
4980 "file1": "the original contents",
4981 }),
4982 )
4983 .await;
4984
4985 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4986 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4987 let buffer = project
4988 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4989 .await
4990 .unwrap();
4991
4992 // Change the buffer's file on disk, and then wait for the file change
4993 // to be detected by the worktree, so that the buffer starts reloading.
4994 fs.save(
4995 path!("/dir/file1").as_ref(),
4996 &"the first contents".into(),
4997 Default::default(),
4998 )
4999 .await
5000 .unwrap();
5001 worktree.next_event(cx).await;
5002
5003 cx.executor()
5004 .spawn(cx.executor().simulate_random_delay())
5005 .await;
5006
5007 // Perform a noop edit, causing the buffer's version to increase.
5008 buffer.update(cx, |buffer, cx| {
5009 buffer.edit([(0..0, " ")], None, cx);
5010 buffer.undo(cx);
5011 });
5012
5013 cx.executor().run_until_parked();
5014 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5015 buffer.read_with(cx, |buffer, _| {
5016 let buffer_text = buffer.text();
5017 if buffer_text == on_disk_text {
5018 assert!(
5019 !buffer.is_dirty() && !buffer.has_conflict(),
5020 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5021 );
5022 }
5023 // If the file change occurred while the buffer was processing the first
5024 // change, the buffer will be in a conflicting state.
5025 else {
5026 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5027 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5028 }
5029 });
5030}
5031
5032#[gpui::test]
5033async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5034 init_test(cx);
5035
5036 let fs = FakeFs::new(cx.executor());
5037 fs.insert_tree(
5038 path!("/dir"),
5039 json!({
5040 "file1": "the old contents",
5041 }),
5042 )
5043 .await;
5044
5045 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5046 let buffer = project
5047 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5048 .await
5049 .unwrap();
5050 buffer.update(cx, |buffer, cx| {
5051 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5052 });
5053
5054 project
5055 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5056 .await
5057 .unwrap();
5058
5059 let new_text = fs
5060 .load(Path::new(path!("/dir/file1")))
5061 .await
5062 .unwrap()
5063 .replace("\r\n", "\n");
5064 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5065}
5066
5067#[gpui::test]
5068async fn test_save_as(cx: &mut gpui::TestAppContext) {
5069 init_test(cx);
5070
5071 let fs = FakeFs::new(cx.executor());
5072 fs.insert_tree("/dir", json!({})).await;
5073
5074 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5075
5076 let languages = project.update(cx, |project, _| project.languages().clone());
5077 languages.add(rust_lang());
5078
5079 let buffer = project.update(cx, |project, cx| {
5080 project.create_local_buffer("", None, false, cx)
5081 });
5082 buffer.update(cx, |buffer, cx| {
5083 buffer.edit([(0..0, "abc")], None, cx);
5084 assert!(buffer.is_dirty());
5085 assert!(!buffer.has_conflict());
5086 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
5087 });
5088 project
5089 .update(cx, |project, cx| {
5090 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5091 let path = ProjectPath {
5092 worktree_id,
5093 path: rel_path("file1.rs").into(),
5094 };
5095 project.save_buffer_as(buffer.clone(), path, cx)
5096 })
5097 .await
5098 .unwrap();
5099 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
5100
5101 cx.executor().run_until_parked();
5102 buffer.update(cx, |buffer, cx| {
5103 assert_eq!(
5104 buffer.file().unwrap().full_path(cx),
5105 Path::new("dir/file1.rs")
5106 );
5107 assert!(!buffer.is_dirty());
5108 assert!(!buffer.has_conflict());
5109 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
5110 });
5111
5112 let opened_buffer = project
5113 .update(cx, |project, cx| {
5114 project.open_local_buffer("/dir/file1.rs", cx)
5115 })
5116 .await
5117 .unwrap();
5118 assert_eq!(opened_buffer, buffer);
5119}
5120
// "Save as" targeting a sibling path: the buffer re-targets to the new file,
// while the original file on disk keeps its old contents.
#[gpui::test]
async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    fs.insert_tree(
        path!("/dir"),
        json!({
            "data_a.txt": "data about a"
        }),
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    // Replace the final character so the buffer reads "data about b".
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(11..12, "b")], None, cx);
    });

    // Save buffer's contents as a new file and confirm that the buffer's now
    // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
    // file associated with the buffer has now been updated to `data_b.txt`
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let new_path = ProjectPath {
                worktree_id,
                path: rel_path("data_b.txt").into(),
            };

            project.save_buffer_as(buffer.clone(), new_path, cx)
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_b.txt")
        )
    });

    // Open the original `data_a.txt` file, confirming that its contents are
    // unchanged and the resulting buffer's associated file is `data_a.txt`.
    let original_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    original_buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "data about a");
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_a.txt")
        )
    });
}
5187
// Renames and deletions on the real filesystem must be reflected in the
// worktree's entries, keep entry ids stable across renames, retarget open
// buffers to their new paths, and replicate to a remote copy of the worktree
// via the observed update stream.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helpers: open a buffer / look up an entry id by worktree-relative path.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // These ids must survive the renames performed below.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, to replay on the remote.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree now reflects all the renames and the deletion.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids are stable across renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path and reports DiskState::Deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5355
// Renaming a directory must preserve the entry ids of the directory and its
// children, and must not mark open buffers inside it as dirty.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up an entry id by worktree-relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory that contains the open buffer's file.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids survived the rename, and the buffer stayed clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
5409
5410#[gpui::test]
5411async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5412 init_test(cx);
5413
5414 let fs = FakeFs::new(cx.executor());
5415 fs.insert_tree(
5416 "/dir",
5417 json!({
5418 "a.txt": "a-contents",
5419 "b.txt": "b-contents",
5420 }),
5421 )
5422 .await;
5423
5424 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5425
5426 // Spawn multiple tasks to open paths, repeating some paths.
5427 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5428 (
5429 p.open_local_buffer("/dir/a.txt", cx),
5430 p.open_local_buffer("/dir/b.txt", cx),
5431 p.open_local_buffer("/dir/a.txt", cx),
5432 )
5433 });
5434
5435 let buffer_a_1 = buffer_a_1.await.unwrap();
5436 let buffer_a_2 = buffer_a_2.await.unwrap();
5437 let buffer_b = buffer_b.await.unwrap();
5438 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5439 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5440
5441 // There is only one buffer per path.
5442 let buffer_a_id = buffer_a_1.entity_id();
5443 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5444
5445 // Open the same path again while it is still open.
5446 drop(buffer_a_1);
5447 let buffer_a_3 = project
5448 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5449 .await
5450 .unwrap();
5451
5452 // There's still only one buffer per path.
5453 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5454}
5455
5456#[gpui::test]
5457async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5458 init_test(cx);
5459
5460 let fs = FakeFs::new(cx.executor());
5461 fs.insert_tree(
5462 path!("/dir"),
5463 json!({
5464 "file1": "abc",
5465 "file2": "def",
5466 "file3": "ghi",
5467 }),
5468 )
5469 .await;
5470
5471 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5472
5473 let buffer1 = project
5474 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5475 .await
5476 .unwrap();
5477 let events = Arc::new(Mutex::new(Vec::new()));
5478
5479 // initially, the buffer isn't dirty.
5480 buffer1.update(cx, |buffer, cx| {
5481 cx.subscribe(&buffer1, {
5482 let events = events.clone();
5483 move |_, _, event, _| match event {
5484 BufferEvent::Operation { .. } => {}
5485 _ => events.lock().push(event.clone()),
5486 }
5487 })
5488 .detach();
5489
5490 assert!(!buffer.is_dirty());
5491 assert!(events.lock().is_empty());
5492
5493 buffer.edit([(1..2, "")], None, cx);
5494 });
5495
5496 // after the first edit, the buffer is dirty, and emits a dirtied event.
5497 buffer1.update(cx, |buffer, cx| {
5498 assert!(buffer.text() == "ac");
5499 assert!(buffer.is_dirty());
5500 assert_eq!(
5501 *events.lock(),
5502 &[
5503 language::BufferEvent::Edited,
5504 language::BufferEvent::DirtyChanged
5505 ]
5506 );
5507 events.lock().clear();
5508 buffer.did_save(
5509 buffer.version(),
5510 buffer.file().unwrap().disk_state().mtime(),
5511 cx,
5512 );
5513 });
5514
5515 // after saving, the buffer is not dirty, and emits a saved event.
5516 buffer1.update(cx, |buffer, cx| {
5517 assert!(!buffer.is_dirty());
5518 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
5519 events.lock().clear();
5520
5521 buffer.edit([(1..1, "B")], None, cx);
5522 buffer.edit([(2..2, "D")], None, cx);
5523 });
5524
5525 // after editing again, the buffer is dirty, and emits another dirty event.
5526 buffer1.update(cx, |buffer, cx| {
5527 assert!(buffer.text() == "aBDc");
5528 assert!(buffer.is_dirty());
5529 assert_eq!(
5530 *events.lock(),
5531 &[
5532 language::BufferEvent::Edited,
5533 language::BufferEvent::DirtyChanged,
5534 language::BufferEvent::Edited,
5535 ],
5536 );
5537 events.lock().clear();
5538
5539 // After restoring the buffer to its previously-saved state,
5540 // the buffer is not considered dirty anymore.
5541 buffer.edit([(1..3, "")], None, cx);
5542 assert!(buffer.text() == "ac");
5543 assert!(!buffer.is_dirty());
5544 });
5545
5546 assert_eq!(
5547 *events.lock(),
5548 &[
5549 language::BufferEvent::Edited,
5550 language::BufferEvent::DirtyChanged
5551 ]
5552 );
5553
5554 // When a file is deleted, it is not considered dirty.
5555 let events = Arc::new(Mutex::new(Vec::new()));
5556 let buffer2 = project
5557 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5558 .await
5559 .unwrap();
5560 buffer2.update(cx, |_, cx| {
5561 cx.subscribe(&buffer2, {
5562 let events = events.clone();
5563 move |_, _, event, _| match event {
5564 BufferEvent::Operation { .. } => {}
5565 _ => events.lock().push(event.clone()),
5566 }
5567 })
5568 .detach();
5569 });
5570
5571 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
5572 .await
5573 .unwrap();
5574 cx.executor().run_until_parked();
5575 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5576 assert_eq!(
5577 mem::take(&mut *events.lock()),
5578 &[language::BufferEvent::FileHandleChanged]
5579 );
5580
5581 // Buffer becomes dirty when edited.
5582 buffer2.update(cx, |buffer, cx| {
5583 buffer.edit([(2..3, "")], None, cx);
5584 assert_eq!(buffer.is_dirty(), true);
5585 });
5586 assert_eq!(
5587 mem::take(&mut *events.lock()),
5588 &[
5589 language::BufferEvent::Edited,
5590 language::BufferEvent::DirtyChanged
5591 ]
5592 );
5593
5594 // Buffer becomes clean again when all of its content is removed, because
5595 // the file was deleted.
5596 buffer2.update(cx, |buffer, cx| {
5597 buffer.edit([(0..2, "")], None, cx);
5598 assert_eq!(buffer.is_empty(), true);
5599 assert_eq!(buffer.is_dirty(), false);
5600 });
5601 assert_eq!(
5602 *events.lock(),
5603 &[
5604 language::BufferEvent::Edited,
5605 language::BufferEvent::DirtyChanged
5606 ]
5607 );
5608
5609 // When a file is already dirty when deleted, we don't emit a Dirtied event.
5610 let events = Arc::new(Mutex::new(Vec::new()));
5611 let buffer3 = project
5612 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
5613 .await
5614 .unwrap();
5615 buffer3.update(cx, |_, cx| {
5616 cx.subscribe(&buffer3, {
5617 let events = events.clone();
5618 move |_, _, event, _| match event {
5619 BufferEvent::Operation { .. } => {}
5620 _ => events.lock().push(event.clone()),
5621 }
5622 })
5623 .detach();
5624 });
5625
5626 buffer3.update(cx, |buffer, cx| {
5627 buffer.edit([(0..0, "x")], None, cx);
5628 });
5629 events.lock().clear();
5630 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
5631 .await
5632 .unwrap();
5633 cx.executor().run_until_parked();
5634 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
5635 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
5636}
5637
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how an open buffer reacts to its backing file changing on disk:
    // - a clean buffer is reloaded, with anchors adjusted across the diff;
    // - a dirty buffer keeps its contents and is flagged as conflicted.
    init_test(cx);

    // `ˇ` markers in the text become offsets used to create anchors below.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Anchors placed at the marked offsets; they should survive the on-disk
    // reload and land at the corresponding positions in the new text.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
5720
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    // Verifies line-ending handling: buffers normalize CRLF to LF internally,
    // detect the file's line ending, pick up line-ending changes on disk, and
    // write the detected line ending back out on save.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();

    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    // The CRLF file's text is normalized to LF in memory; only the
    // `line_ending` metadata records that it was a Windows file.
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        path!("/dir/file1").as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
5782
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics linked via `related_information`
    // are grouped: each group has one primary entry plus its hints, sharing a
    // `group_id`, and `diagnostic_group` returns all members of one group.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two diagnostic "groups": "error 1" with one hint, and "error 2" with
    // two hints. Hints point back to their primary via related information
    // whose message is "original diagnostic".
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, in buffer order. "error 2"'s group got id 0 and
    // "error 1"'s got id 1; `is_primary` marks the group's main diagnostic.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: "error 2" and both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6042
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the LSP file-operation protocol around renaming an entry:
    // the server's registered file-operation filters cause the project to
    // send `workspace/willRenameFiles` (whose returned WorkspaceEdit is
    // applied) followed by a `workspace/didRenameFiles` notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters matching *.rs files and all folders; the server only receives
    // rename notifications for paths matching these filters.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename; it stays pending until the willRename request
    // handler below responds.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will answer willRenameFiles with; the project is
    // expected to resolve/apply it.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles
    // with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6179
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Verifies the two-phase LSP symbol rename: `prepare_rename` returns the
    // range of the symbol under the cursor, and `perform_rename` applies the
    // server's WorkspaceEdit across multiple buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Prepare a rename at offset 7 (inside "ONE"); the fake server answers
    // with the symbol's range, 0:6..0:9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server replies with edits touching both
    // one.rs (the definition) and two.rs (the two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both edited buffers with the rename
    // applied everywhere.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6320
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Verifies project-wide text search, including that searches see unsaved
    // edits in open buffers rather than the on-disk contents.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // "TWO" appears in two.rs (the definition) and three.rs (a reference).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so that it now contains "TWO"
    // twice; search must pick up the dirty buffer contents.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
6397
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies the inclusion `PathMatcher` argument of `SearchQuery::text`:
    // only files matching at least one inclusion glob are searched, and
    // inclusions that match nothing are harmless alongside ones that do.
    init_test(cx);

    // Every file contains the word "file", so results are determined purely
    // by the inclusion filters.
    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
6521
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Verifies the exclusion `PathMatcher` argument of `SearchQuery::text`:
    // files matching any exclusion glob are skipped, non-matching exclusions
    // have no effect, and excluding everything yields no results.
    init_test(cx);

    // Every file contains the word "file", so results are determined purely
    // by the exclusion filters.
    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
6645
6646#[gpui::test]
6647async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
6648 init_test(cx);
6649
6650 let search_query = "file";
6651
6652 let fs = FakeFs::new(cx.executor());
6653 fs.insert_tree(
6654 path!("/dir"),
6655 json!({
6656 "one.rs": r#"// Rust file one"#,
6657 "one.ts": r#"// TypeScript file one"#,
6658 "two.rs": r#"// Rust file two"#,
6659 "two.ts": r#"// TypeScript file two"#,
6660 }),
6661 )
6662 .await;
6663
6664 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6665 let path_style = PathStyle::local();
6666 let _buffer = project.update(cx, |project, cx| {
6667 project.create_local_buffer("file", None, false, cx)
6668 });
6669
6670 assert_eq!(
6671 search(
6672 &project,
6673 SearchQuery::text(
6674 search_query,
6675 false,
6676 true,
6677 false,
6678 Default::default(),
6679 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
6680 false,
6681 None,
6682 )
6683 .unwrap(),
6684 cx
6685 )
6686 .await
6687 .unwrap(),
6688 HashMap::from_iter([
6689 (path!("dir/one.rs").to_string(), vec![8..12]),
6690 (path!("dir/one.ts").to_string(), vec![14..18]),
6691 (path!("dir/two.rs").to_string(), vec![8..12]),
6692 (path!("dir/two.ts").to_string(), vec![14..18]),
6693 ]),
6694 "If no exclusions match, all files should be returned"
6695 );
6696
6697 assert_eq!(
6698 search(
6699 &project,
6700 SearchQuery::text(
6701 search_query,
6702 false,
6703 true,
6704 false,
6705 Default::default(),
6706 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
6707 false,
6708 None,
6709 )
6710 .unwrap(),
6711 cx
6712 )
6713 .await
6714 .unwrap(),
6715 HashMap::from_iter([
6716 (path!("dir/one.ts").to_string(), vec![14..18]),
6717 (path!("dir/two.ts").to_string(), vec![14..18]),
6718 ]),
6719 "Rust exclusion search should give only TypeScript files"
6720 );
6721
6722 assert_eq!(
6723 search(
6724 &project,
6725 SearchQuery::text(
6726 search_query,
6727 false,
6728 true,
6729 false,
6730 Default::default(),
6731 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6732 false,
6733 None,
6734 )
6735 .unwrap(),
6736 cx
6737 )
6738 .await
6739 .unwrap(),
6740 HashMap::from_iter([
6741 (path!("dir/one.rs").to_string(), vec![8..12]),
6742 (path!("dir/two.rs").to_string(), vec![8..12]),
6743 ]),
6744 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6745 );
6746
6747 assert!(
6748 search(
6749 &project,
6750 SearchQuery::text(
6751 search_query,
6752 false,
6753 true,
6754 false,
6755 Default::default(),
6756 PathMatcher::new(
6757 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6758 PathStyle::local(),
6759 )
6760 .unwrap(),
6761 false,
6762 None,
6763 )
6764 .unwrap(),
6765 cx
6766 )
6767 .await
6768 .unwrap()
6769 .is_empty(),
6770 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6771 );
6772}
6773
6774#[gpui::test]
6775async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6776 init_test(cx);
6777
6778 let search_query = "file";
6779
6780 let fs = FakeFs::new(cx.executor());
6781 fs.insert_tree(
6782 path!("/dir"),
6783 json!({
6784 "one.rs": r#"// Rust file one"#,
6785 "one.ts": r#"// TypeScript file one"#,
6786 "two.rs": r#"// Rust file two"#,
6787 "two.ts": r#"// TypeScript file two"#,
6788 }),
6789 )
6790 .await;
6791 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6792 assert!(
6793 search(
6794 &project,
6795 SearchQuery::text(
6796 search_query,
6797 false,
6798 true,
6799 false,
6800 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6801 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6802 false,
6803 None,
6804 )
6805 .unwrap(),
6806 cx
6807 )
6808 .await
6809 .unwrap()
6810 .is_empty(),
6811 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6812 );
6813
6814 assert!(
6815 search(
6816 &project,
6817 SearchQuery::text(
6818 search_query,
6819 false,
6820 true,
6821 false,
6822 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6823 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6824 false,
6825 None,
6826 )
6827 .unwrap(),
6828 cx
6829 )
6830 .await
6831 .unwrap()
6832 .is_empty(),
6833 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6834 );
6835
6836 assert!(
6837 search(
6838 &project,
6839 SearchQuery::text(
6840 search_query,
6841 false,
6842 true,
6843 false,
6844 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6845 .unwrap(),
6846 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6847 .unwrap(),
6848 false,
6849 None,
6850 )
6851 .unwrap(),
6852 cx
6853 )
6854 .await
6855 .unwrap()
6856 .is_empty(),
6857 "Non-matching inclusions and exclusions should not change that."
6858 );
6859
6860 assert_eq!(
6861 search(
6862 &project,
6863 SearchQuery::text(
6864 search_query,
6865 false,
6866 true,
6867 false,
6868 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6869 .unwrap(),
6870 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6871 .unwrap(),
6872 false,
6873 None,
6874 )
6875 .unwrap(),
6876 cx
6877 )
6878 .await
6879 .unwrap(),
6880 HashMap::from_iter([
6881 (path!("dir/one.ts").to_string(), vec![14..18]),
6882 (path!("dir/two.ts").to_string(), vec![14..18]),
6883 ]),
6884 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6885 );
6886}
6887
6888#[gpui::test]
6889async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
6890 init_test(cx);
6891
6892 let fs = FakeFs::new(cx.executor());
6893 fs.insert_tree(
6894 path!("/worktree-a"),
6895 json!({
6896 "haystack.rs": r#"// NEEDLE"#,
6897 "haystack.ts": r#"// NEEDLE"#,
6898 }),
6899 )
6900 .await;
6901 fs.insert_tree(
6902 path!("/worktree-b"),
6903 json!({
6904 "haystack.rs": r#"// NEEDLE"#,
6905 "haystack.ts": r#"// NEEDLE"#,
6906 }),
6907 )
6908 .await;
6909
6910 let path_style = PathStyle::local();
6911 let project = Project::test(
6912 fs.clone(),
6913 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
6914 cx,
6915 )
6916 .await;
6917
6918 assert_eq!(
6919 search(
6920 &project,
6921 SearchQuery::text(
6922 "NEEDLE",
6923 false,
6924 true,
6925 false,
6926 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
6927 Default::default(),
6928 true,
6929 None,
6930 )
6931 .unwrap(),
6932 cx
6933 )
6934 .await
6935 .unwrap(),
6936 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
6937 "should only return results from included worktree"
6938 );
6939 assert_eq!(
6940 search(
6941 &project,
6942 SearchQuery::text(
6943 "NEEDLE",
6944 false,
6945 true,
6946 false,
6947 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
6948 Default::default(),
6949 true,
6950 None,
6951 )
6952 .unwrap(),
6953 cx
6954 )
6955 .await
6956 .unwrap(),
6957 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
6958 "should only return results from included worktree"
6959 );
6960
6961 assert_eq!(
6962 search(
6963 &project,
6964 SearchQuery::text(
6965 "NEEDLE",
6966 false,
6967 true,
6968 false,
6969 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
6970 Default::default(),
6971 false,
6972 None,
6973 )
6974 .unwrap(),
6975 cx
6976 )
6977 .await
6978 .unwrap(),
6979 HashMap::from_iter([
6980 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
6981 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
6982 ]),
6983 "should return results from both worktrees"
6984 );
6985}
6986
// Verifies how project-wide search treats gitignored paths: skipped by default,
// searched when the query opts into ignored files, and still subject to
// include/exclude path filters in that mode.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    // "target" and "node_modules" are both gitignored; only the top-level
    // package.json is a tracked, searchable file.
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // With the fourth `SearchQuery::text` argument false, ignored directories
    // are not searched at all.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project for each search below.
    // NOTE(review): presumably recreated so no worktree state carries over
    // between searches — confirm before consolidating.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // Same query with the fourth argument true: ignored files are searched too,
    // so every file containing "key" is returned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Even when ignored files are searched, include/exclude matchers still
    // narrow the result set: include only the prettier subtree, exclude *.ts.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
7111
// Verifies project-wide search over non-ASCII (Cyrillic) text. The expected
// match ranges are byte offsets into the UTF-8 contents: each Cyrillic letter
// occupies two bytes, so "привет" spans 12 bytes.
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // A case-sensitive search remains a plain text query...
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    // ...and only matches the lower-case occurrences.
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A case-insensitive search over this Unicode needle is turned into a
    // regex query (asserted below) and matches both "ПРИВЕТ" and "привет".
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing "." is treated literally even in the regex-backed query:
    // only "ПРИВЕТ." in two.rs matches (13-byte range), not "ПРИВЕТ?" or
    // "привет!" in one.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7194
7195#[gpui::test]
7196async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7197 init_test(cx);
7198
7199 let fs = FakeFs::new(cx.executor());
7200 fs.insert_tree(
7201 "/one/two",
7202 json!({
7203 "three": {
7204 "a.txt": "",
7205 "four": {}
7206 },
7207 "c.rs": ""
7208 }),
7209 )
7210 .await;
7211
7212 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7213 project
7214 .update(cx, |project, cx| {
7215 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7216 project.create_entry((id, rel_path("b..")), true, cx)
7217 })
7218 .await
7219 .unwrap()
7220 .into_included()
7221 .unwrap();
7222
7223 assert_eq!(
7224 fs.paths(true),
7225 vec![
7226 PathBuf::from(path!("/")),
7227 PathBuf::from(path!("/one")),
7228 PathBuf::from(path!("/one/two")),
7229 PathBuf::from(path!("/one/two/c.rs")),
7230 PathBuf::from(path!("/one/two/three")),
7231 PathBuf::from(path!("/one/two/three/a.txt")),
7232 PathBuf::from(path!("/one/two/three/b..")),
7233 PathBuf::from(path!("/one/two/three/four")),
7234 ]
7235 );
7236}
7237
// Verifies that a hover request fans out to every language server that
// declares hover capabilities, that servers without the capability are never
// queried, and that empty (None) responses are dropped from the merged result.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Four fake servers for "tsx": the first three advertise hover support,
    // the last one does not and must never receive a hover request.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each started server: two return content, one
    // returns None, and the capability-less server panics if ever queried.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Block until every capable server has actually received a hover request.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two content-bearing responses survive; ESLint's None is dropped.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
7392
// Verifies that hover responses consisting solely of empty or whitespace-only
// parts are filtered out, yielding no hover blocks at all.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the fake server for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server answers hovers with three parts that are all effectively
    // empty: "", whitespace, and newlines only.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Ensure the request actually reached the server before inspecting results.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
7466
// Verifies that requesting code actions with a `kinds` filter returns only
// actions of the requested kind: the fake server always responds with both an
// organize-imports and a fix-all action, so the filtering happens on the
// project side.
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the fake server for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server unconditionally returns two actions of different kinds.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Ask only for organize-imports actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Ensure the request actually reached the server before inspecting results.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Of the two actions the server returned, only the requested kind remains.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
7545
7546#[gpui::test]
7547async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
7548 init_test(cx);
7549
7550 let fs = FakeFs::new(cx.executor());
7551 fs.insert_tree(
7552 path!("/dir"),
7553 json!({
7554 "a.tsx": "a",
7555 }),
7556 )
7557 .await;
7558
7559 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7560
7561 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7562 language_registry.add(tsx_lang());
7563 let language_server_names = [
7564 "TypeScriptServer",
7565 "TailwindServer",
7566 "ESLintServer",
7567 "NoActionsCapabilitiesServer",
7568 ];
7569
7570 let mut language_server_rxs = [
7571 language_registry.register_fake_lsp(
7572 "tsx",
7573 FakeLspAdapter {
7574 name: language_server_names[0],
7575 capabilities: lsp::ServerCapabilities {
7576 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7577 ..lsp::ServerCapabilities::default()
7578 },
7579 ..FakeLspAdapter::default()
7580 },
7581 ),
7582 language_registry.register_fake_lsp(
7583 "tsx",
7584 FakeLspAdapter {
7585 name: language_server_names[1],
7586 capabilities: lsp::ServerCapabilities {
7587 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7588 ..lsp::ServerCapabilities::default()
7589 },
7590 ..FakeLspAdapter::default()
7591 },
7592 ),
7593 language_registry.register_fake_lsp(
7594 "tsx",
7595 FakeLspAdapter {
7596 name: language_server_names[2],
7597 capabilities: lsp::ServerCapabilities {
7598 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7599 ..lsp::ServerCapabilities::default()
7600 },
7601 ..FakeLspAdapter::default()
7602 },
7603 ),
7604 language_registry.register_fake_lsp(
7605 "tsx",
7606 FakeLspAdapter {
7607 name: language_server_names[3],
7608 capabilities: lsp::ServerCapabilities {
7609 code_action_provider: None,
7610 ..lsp::ServerCapabilities::default()
7611 },
7612 ..FakeLspAdapter::default()
7613 },
7614 ),
7615 ];
7616
7617 let (buffer, _handle) = project
7618 .update(cx, |p, cx| {
7619 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7620 })
7621 .await
7622 .unwrap();
7623 cx.executor().run_until_parked();
7624
7625 let mut servers_with_actions_requests = HashMap::default();
7626 for i in 0..language_server_names.len() {
7627 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
7628 panic!(
7629 "Failed to get language server #{i} with name {}",
7630 &language_server_names[i]
7631 )
7632 });
7633 let new_server_name = new_server.server.name();
7634
7635 assert!(
7636 !servers_with_actions_requests.contains_key(&new_server_name),
7637 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7638 );
7639 match new_server_name.0.as_ref() {
7640 "TailwindServer" | "TypeScriptServer" => {
7641 servers_with_actions_requests.insert(
7642 new_server_name.clone(),
7643 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7644 move |_, _| {
7645 let name = new_server_name.clone();
7646 async move {
7647 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
7648 lsp::CodeAction {
7649 title: format!("{name} code action"),
7650 ..lsp::CodeAction::default()
7651 },
7652 )]))
7653 }
7654 },
7655 ),
7656 );
7657 }
7658 "ESLintServer" => {
7659 servers_with_actions_requests.insert(
7660 new_server_name,
7661 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7662 |_, _| async move { Ok(None) },
7663 ),
7664 );
7665 }
7666 "NoActionsCapabilitiesServer" => {
7667 let _never_handled = new_server
7668 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7669 panic!(
7670 "Should not call for code actions server with no corresponding capabilities"
7671 )
7672 });
7673 }
7674 unexpected => panic!("Unexpected server name: {unexpected}"),
7675 }
7676 }
7677
7678 let code_actions_task = project.update(cx, |project, cx| {
7679 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
7680 });
7681
7682 // cx.run_until_parked();
7683 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
7684 |mut code_actions_request| async move {
7685 code_actions_request
7686 .next()
7687 .await
7688 .expect("All code actions requests should have been triggered")
7689 },
7690 ))
7691 .await;
7692 assert_eq!(
7693 vec!["TailwindServer code action", "TypeScriptServer code action"],
7694 code_actions_task
7695 .await
7696 .unwrap()
7697 .unwrap()
7698 .into_iter()
7699 .map(|code_action| code_action.lsp_action.title().to_owned())
7700 .sorted()
7701 .collect::<Vec<_>>(),
7702 "Should receive code actions responses from all related servers with hover capabilities"
7703 );
7704}
7705
7706#[gpui::test]
7707async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
7708 init_test(cx);
7709
7710 let fs = FakeFs::new(cx.executor());
7711 fs.insert_tree(
7712 "/dir",
7713 json!({
7714 "a.rs": "let a = 1;",
7715 "b.rs": "let b = 2;",
7716 "c.rs": "let c = 2;",
7717 }),
7718 )
7719 .await;
7720
7721 let project = Project::test(
7722 fs,
7723 [
7724 "/dir/a.rs".as_ref(),
7725 "/dir/b.rs".as_ref(),
7726 "/dir/c.rs".as_ref(),
7727 ],
7728 cx,
7729 )
7730 .await;
7731
7732 // check the initial state and get the worktrees
7733 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7734 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7735 assert_eq!(worktrees.len(), 3);
7736
7737 let worktree_a = worktrees[0].read(cx);
7738 let worktree_b = worktrees[1].read(cx);
7739 let worktree_c = worktrees[2].read(cx);
7740
7741 // check they start in the right order
7742 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7743 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7744 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7745
7746 (
7747 worktrees[0].clone(),
7748 worktrees[1].clone(),
7749 worktrees[2].clone(),
7750 )
7751 });
7752
7753 // move first worktree to after the second
7754 // [a, b, c] -> [b, a, c]
7755 project
7756 .update(cx, |project, cx| {
7757 let first = worktree_a.read(cx);
7758 let second = worktree_b.read(cx);
7759 project.move_worktree(first.id(), second.id(), cx)
7760 })
7761 .expect("moving first after second");
7762
7763 // check the state after moving
7764 project.update(cx, |project, cx| {
7765 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7766 assert_eq!(worktrees.len(), 3);
7767
7768 let first = worktrees[0].read(cx);
7769 let second = worktrees[1].read(cx);
7770 let third = worktrees[2].read(cx);
7771
7772 // check they are now in the right order
7773 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7774 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7775 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7776 });
7777
7778 // move the second worktree to before the first
7779 // [b, a, c] -> [a, b, c]
7780 project
7781 .update(cx, |project, cx| {
7782 let second = worktree_a.read(cx);
7783 let first = worktree_b.read(cx);
7784 project.move_worktree(first.id(), second.id(), cx)
7785 })
7786 .expect("moving second before first");
7787
7788 // check the state after moving
7789 project.update(cx, |project, cx| {
7790 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7791 assert_eq!(worktrees.len(), 3);
7792
7793 let first = worktrees[0].read(cx);
7794 let second = worktrees[1].read(cx);
7795 let third = worktrees[2].read(cx);
7796
7797 // check they are now in the right order
7798 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7799 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7800 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7801 });
7802
7803 // move the second worktree to after the third
7804 // [a, b, c] -> [a, c, b]
7805 project
7806 .update(cx, |project, cx| {
7807 let second = worktree_b.read(cx);
7808 let third = worktree_c.read(cx);
7809 project.move_worktree(second.id(), third.id(), cx)
7810 })
7811 .expect("moving second after third");
7812
7813 // check the state after moving
7814 project.update(cx, |project, cx| {
7815 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7816 assert_eq!(worktrees.len(), 3);
7817
7818 let first = worktrees[0].read(cx);
7819 let second = worktrees[1].read(cx);
7820 let third = worktrees[2].read(cx);
7821
7822 // check they are now in the right order
7823 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7824 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7825 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7826 });
7827
7828 // move the third worktree to before the second
7829 // [a, c, b] -> [a, b, c]
7830 project
7831 .update(cx, |project, cx| {
7832 let third = worktree_c.read(cx);
7833 let second = worktree_b.read(cx);
7834 project.move_worktree(third.id(), second.id(), cx)
7835 })
7836 .expect("moving third before second");
7837
7838 // check the state after moving
7839 project.update(cx, |project, cx| {
7840 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7841 assert_eq!(worktrees.len(), 3);
7842
7843 let first = worktrees[0].read(cx);
7844 let second = worktrees[1].read(cx);
7845 let third = worktrees[2].read(cx);
7846
7847 // check they are now in the right order
7848 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7849 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7850 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7851 });
7852
7853 // move the first worktree to after the third
7854 // [a, b, c] -> [b, c, a]
7855 project
7856 .update(cx, |project, cx| {
7857 let first = worktree_a.read(cx);
7858 let third = worktree_c.read(cx);
7859 project.move_worktree(first.id(), third.id(), cx)
7860 })
7861 .expect("moving first after third");
7862
7863 // check the state after moving
7864 project.update(cx, |project, cx| {
7865 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7866 assert_eq!(worktrees.len(), 3);
7867
7868 let first = worktrees[0].read(cx);
7869 let second = worktrees[1].read(cx);
7870 let third = worktrees[2].read(cx);
7871
7872 // check they are now in the right order
7873 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7874 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7875 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7876 });
7877
7878 // move the third worktree to before the first
7879 // [b, c, a] -> [a, b, c]
7880 project
7881 .update(cx, |project, cx| {
7882 let third = worktree_a.read(cx);
7883 let first = worktree_b.read(cx);
7884 project.move_worktree(third.id(), first.id(), cx)
7885 })
7886 .expect("moving third before first");
7887
7888 // check the state after moving
7889 project.update(cx, |project, cx| {
7890 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7891 assert_eq!(worktrees.len(), 3);
7892
7893 let first = worktrees[0].read(cx);
7894 let second = worktrees[1].read(cx);
7895 let third = worktrees[2].read(cx);
7896
7897 // check they are now in the right order
7898 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7899 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7900 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7901 });
7902}
7903
7904#[gpui::test]
7905async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
7906 init_test(cx);
7907
7908 let staged_contents = r#"
7909 fn main() {
7910 println!("hello world");
7911 }
7912 "#
7913 .unindent();
7914 let file_contents = r#"
7915 // print goodbye
7916 fn main() {
7917 println!("goodbye world");
7918 }
7919 "#
7920 .unindent();
7921
7922 let fs = FakeFs::new(cx.background_executor.clone());
7923 fs.insert_tree(
7924 "/dir",
7925 json!({
7926 ".git": {},
7927 "src": {
7928 "main.rs": file_contents,
7929 }
7930 }),
7931 )
7932 .await;
7933
7934 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7935
7936 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7937
7938 let buffer = project
7939 .update(cx, |project, cx| {
7940 project.open_local_buffer("/dir/src/main.rs", cx)
7941 })
7942 .await
7943 .unwrap();
7944 let unstaged_diff = project
7945 .update(cx, |project, cx| {
7946 project.open_unstaged_diff(buffer.clone(), cx)
7947 })
7948 .await
7949 .unwrap();
7950
7951 cx.run_until_parked();
7952 unstaged_diff.update(cx, |unstaged_diff, cx| {
7953 let snapshot = buffer.read(cx).snapshot();
7954 assert_hunks(
7955 unstaged_diff.snapshot(cx).hunks(&snapshot),
7956 &snapshot,
7957 &unstaged_diff.base_text_string(cx).unwrap(),
7958 &[
7959 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
7960 (
7961 2..3,
7962 " println!(\"hello world\");\n",
7963 " println!(\"goodbye world\");\n",
7964 DiffHunkStatus::modified_none(),
7965 ),
7966 ],
7967 );
7968 });
7969
7970 let staged_contents = r#"
7971 // print goodbye
7972 fn main() {
7973 }
7974 "#
7975 .unindent();
7976
7977 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7978
7979 cx.run_until_parked();
7980 unstaged_diff.update(cx, |unstaged_diff, cx| {
7981 let snapshot = buffer.read(cx).snapshot();
7982 assert_hunks(
7983 unstaged_diff
7984 .snapshot(cx)
7985 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
7986 &snapshot,
7987 &unstaged_diff.base_text(cx).text(),
7988 &[(
7989 2..3,
7990 "",
7991 " println!(\"goodbye world\");\n",
7992 DiffHunkStatus::added_none(),
7993 )],
7994 );
7995 });
7996}
7997
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies `Project::open_uncommitted_diff`: hunks are computed against the
    // HEAD text, and each hunk's secondary status reflects whether the change
    // is also present in the index (staged) or not.
    init_test(cx);

    // HEAD version: no comment line, prints "hello world".
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index version: the println change is staged, the comment line is not.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working-copy version.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index, but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should have picked up the buffer's language (Rust).
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                // The comment line is not in the index, so it still has a
                // secondary (unstaged) hunk.
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                // The println change is staged, so no secondary hunk.
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file appears as a single deletion hunk; the deletion is not
    // yet staged, hence the secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file (drop `deletion.rs` from the index).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk remains, but is now staged (no secondary hunk).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8181
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    // Exercises `stage_or_unstage_hunks`: optimistic (pending) secondary
    // statuses, the events emitted for each stage operation, recovery when the
    // index write fails, and two staging operations issued back-to-back.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: "zero" deleted, "two" and "four" upper-cased — three hunks.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged
    // (SecondaryHunkRemovalPending) before the index write completes.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It is still shown optimistically as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8531
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    // Verifies that staging stays consistent when filesystem events for index
    // writes arrive late: pending statuses must survive interleaved staging
    // operations until their corresponding FS events are delivered.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: "zero" deleted, "two" and "four" upper-cased — three hunks.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events so that index writes are not observed immediately.
    fs.pause_events();

    // Stage the first hunk. It shows as pending until the FS event arrives.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    // Both staged hunks must now show as pending.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8725
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Randomized test: stage/unstage random hunks with random delays in
    // between, then check that the final secondary statuses match what the
    // sequence of operations predicts.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    // Every 5th line is modified in the buffer: lines 0, 5, 10, 15, 20, 25.
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` serves as the model of expected state throughout the test.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    // One hunk per modified line (6 modified lines).
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk: stage if it is unstaged, unstage otherwise, and
        // record the expected optimistic (pending) status in the model.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Let an arbitrary amount of background work run between operations.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // After quiescing, every pending status should have resolved.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    // The real diff must agree with the model.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8845
8846#[gpui::test]
8847async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
8848 init_test(cx);
8849
8850 let committed_contents = r#"
8851 fn main() {
8852 println!("hello from HEAD");
8853 }
8854 "#
8855 .unindent();
8856 let file_contents = r#"
8857 fn main() {
8858 println!("hello from the working copy");
8859 }
8860 "#
8861 .unindent();
8862
8863 let fs = FakeFs::new(cx.background_executor.clone());
8864 fs.insert_tree(
8865 "/dir",
8866 json!({
8867 ".git": {},
8868 "src": {
8869 "main.rs": file_contents,
8870 }
8871 }),
8872 )
8873 .await;
8874
8875 fs.set_head_for_repo(
8876 Path::new("/dir/.git"),
8877 &[("src/main.rs", committed_contents.clone())],
8878 "deadbeef",
8879 );
8880 fs.set_index_for_repo(
8881 Path::new("/dir/.git"),
8882 &[("src/main.rs", committed_contents.clone())],
8883 );
8884
8885 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8886
8887 let buffer = project
8888 .update(cx, |project, cx| {
8889 project.open_local_buffer("/dir/src/main.rs", cx)
8890 })
8891 .await
8892 .unwrap();
8893 let uncommitted_diff = project
8894 .update(cx, |project, cx| {
8895 project.open_uncommitted_diff(buffer.clone(), cx)
8896 })
8897 .await
8898 .unwrap();
8899
8900 cx.run_until_parked();
8901 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8902 let snapshot = buffer.read(cx).snapshot();
8903 assert_hunks(
8904 uncommitted_diff.snapshot(cx).hunks(&snapshot),
8905 &snapshot,
8906 &uncommitted_diff.base_text_string(cx).unwrap(),
8907 &[(
8908 1..2,
8909 " println!(\"hello from HEAD\");\n",
8910 " println!(\"hello from the working copy\");\n",
8911 DiffHunkStatus {
8912 kind: DiffHunkStatusKind::Modified,
8913 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8914 },
8915 )],
8916 );
8917 });
8918}
8919
// TODO: Should we test this on Windows also?
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    // Regression test against a real git repository (RealFs + TempTree):
    // staging a hunk in a file whose committed mode is 0755 must not rewrite
    // the index entry with mode 0644.
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Real-FS test: the executor must be allowed to block on actual IO.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` with the executable bit set, then modify it on disk.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk of the modified file.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // Inspect the staged diff with the real git binary: no mode change may
    // have been recorded.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check via the index listing: the entry must keep mode 100755.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
9004
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that project paths resolve to the *innermost* containing git
    // repository (nested `deps/dep1/.git` wins over the outer `dir1/.git`),
    // that paths outside any repository resolve to `None`, and that removing a
    // `.git` directory invalidates the mapping.
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // Each pair is (project-relative path, expected (repo work dir, repo-relative path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Remove the outer repository; paths under it should no longer resolve.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
9094
/// Verifies that a `.git` directory in the user's home directory is only
/// recognized as a repository when the home directory itself is opened as a
/// worktree — opening a subdirectory of home must not pick it up.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Open only `~/project`: the repo rooted at `~` should not be detected.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Open `~` itself: now the repository should be found and contain the file.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
9152
/// Exercises cached git status reporting against a real git repository:
/// initial modified/added/unchanged/deleted states, status updates after a
/// working-copy edit, and status clearing after commits and file removals.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously unchanged tracked file and expect it to show up.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the outstanding changes, clearing all statuses.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
9282
/// Verifies post-processing of raw git statuses: a nested repository (`sub`)
/// must be excluded from the outer repo's statuses, and a file that is
/// deleted in the index but present in HEAD and the working copy must report
/// a combined `DA` (index-deleted / worktree-added) status.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer `project` repository (not the nested `sub` one).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
9346
9347#[track_caller]
9348/// We merge lhs into rhs.
9349fn merge_pending_ops_snapshots(
9350 source: Vec<pending_op::PendingOps>,
9351 mut target: Vec<pending_op::PendingOps>,
9352) -> Vec<pending_op::PendingOps> {
9353 for s_ops in source {
9354 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9355 if ops.repo_path == s_ops.repo_path {
9356 Some(idx)
9357 } else {
9358 None
9359 }
9360 }) {
9361 let t_ops = &mut target[idx];
9362 for s_op in s_ops.ops {
9363 if let Some(op_idx) = t_ops
9364 .ops
9365 .iter()
9366 .zip(0..)
9367 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9368 {
9369 let t_op = &mut t_ops.ops[op_idx];
9370 match (s_op.job_status, t_op.job_status) {
9371 (pending_op::JobStatus::Running, _) => {}
9372 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9373 (s_st, t_st) if s_st == t_st => {}
9374 _ => unreachable!(),
9375 }
9376 } else {
9377 t_ops.ops.push(s_op);
9378 }
9379 }
9380 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9381 } else {
9382 target.push(s_ops);
9383 }
9384 }
9385 target
9386}
9387
/// Drives alternating stage/unstage operations on a single untracked file and
/// asserts that each operation's pending op transitions Running -> Finished,
/// that the accumulated event stream records every op in order, and that the
/// final cached status reflects the last (staged) state.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into one merged tree so we
    // can assert on the full op history at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    let mut id = 1u16;

    // Helper: run one stage/unstage, asserting the op is Running while the
    // task is in flight and Finished once it resolves. `id` tracks the
    // monotonically increasing op id across invocations.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // All five ops should have been observed via events, in order, Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The last operation staged the file, so it should now read as Added.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9548
/// Issues two overlapping stage requests for the same path and verifies that
/// the first (superseded) op is recorded as Skipped while the second one
/// Finishes, and that the file ends up staged.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot so the full op history can
    // be inspected after both stage calls settle.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage request is detached and left running.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second request for the same path should supersede the first; await it
    // with a timeout so a hang fails the test rather than stalling it.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9654
/// Verifies pending-op bookkeeping for bulk operations: after staging one
/// file, then `stage_all` and `unstage_all`, each file records a Staged op
/// followed by an Unstaged op, and both files end up untracked again.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into one merged tree so the
    // per-path op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt individually, then stage and unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: its individual stage and the final unstage_all.
    // NOTE(review): stage_all appears not to add a new op for an
    // already-staged file — confirm against the pending-op implementation.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all both files are untracked again.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
9783
/// Opens a worktree rooted in a subfolder of a git repository and verifies
/// that statuses are still resolved against the repository root, and that
/// clearing the repo's statuses is reflected in the project.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the files inside the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open the worktree two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clearing the fake repo's statuses should clear them in the project too.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
9863
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Disabled via `#[cfg(any())]` (never compiled) until the flakiness is fixed.
//
// Simulates a conflicted cherry-pick in a real git repository and verifies
// that the repository's `merge_conflicts` set gains the conflicted path while
// CHERRY_PICK_HEAD exists, and empties once the cherry-pick is resolved.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a conflicting commit on another branch, then cherry-pick it onto
    // main after main has diverged on the same file.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
9946
/// Verifies that rewriting `.gitignore` updates both the worktree's ignored
/// flags and the repository's per-file statuses: a previously ignored file
/// becomes tracked (Added, once staged) and a previously tracked file becomes
/// ignored.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // The ignored flags have swapped, and b.txt now shows as Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
10014
// NOTE:
// This test always fails on Windows because, unlike on Unix, Windows does not
// allow renaming a directory that some program already has open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// Verifies that renaming a repository's working directory on disk updates
/// `work_directory_abs_path` while preserving the files' statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole working directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename; statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10096
// NOTE: This test always fails on Windows because, unlike on Unix, Windows
// does not allow renaming a directory that some program already has open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// End-to-end status tracking against a real git repository: untracked files,
/// working-copy edits, commits, reset/stash/index removal, `.gitignore`
/// changes, and statuses surviving directory creation and renames.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout the test.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files and extend the ignore rules, then commit the new ignore file.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Create an untracked file inside a freshly created nested directory.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the parent directory; the untracked status must follow the file.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10321
// Verifies that FS churn confined to git-ignored directories (e.g. a build
// tool writing into `target/`) produces no repository status updates, and
// only the expected worktree-entry events for the tracked parent directory.
// Uses the real filesystem (`RealFs` + `TempTree`), hence `allow_parking`.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record every repository update and worktree-entry change so the test can
    // assert on exactly which events fired (and, later, that none fired).
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel path is test infrastructure, not a real change.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Loading a file inside the ignored dir forces those entries into the scan.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![
            RepositoryEvent::StatusesChanged,
            RepositoryEvent::MergeHeadsChanged,
        ],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Emulate a build-tool/flycheck run inside the ignored `target` dir:
    // create a nested dir, write a temp file into it, then remove everything,
    // letting each step settle before the next.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree snapshot should be back to its initial shape.
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
No updates for more nested directories should happen as those are ignored",
    );
}
10483
// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
// to different timings/ordering of events.
//
// Verifies that a synthetic FS event targeting an already-ignored directory
// (as emitted by flycheck-style tooling) produces no repository updates and
// no worktree-entry events. Uses `FakeFs` so the event can be injected directly.
#[ignore]
#[gpui::test]
async fn test_odd_events_for_ignored_dirs(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "**/target/",
            "src": {
                "main.rs": "fn main() {}",
            },
            "target": {
                "debug": {
                    "foo.txt": "foo",
                    "deps": {}
                }
            }
        }),
    )
    .await;
    // HEAD and index match the working tree, so the repository starts clean.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "**/target/".into()),
            ("src/main.rs", "fn main() {}".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    // Capture all repository updates and worktree-entry changes for assertions.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repository_updates = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repository_updates.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel path is test infrastructure, not a real change.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    // Load a file inside the ignored dir so those entries appear in the worktree.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("target/debug/foo.txt"), cx)
    })
    .await
    .unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("src"), false),
                (rel_path("src/main.rs"), false),
                (rel_path("target"), true),
                (rel_path("target/debug"), true),
                (rel_path("target/debug/deps"), true),
                (rel_path("target/debug/foo.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![
            RepositoryEvent::MergeHeadsChanged,
            RepositoryEvent::BranchChanged,
            RepositoryEvent::StatusesChanged,
            RepositoryEvent::StatusesChanged,
        ],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("target".to_string(), PathChange::Loaded),
            ("target/debug".to_string(), PathChange::Loaded),
            ("target/debug/deps".to_string(), PathChange::Loaded),
            ("target/debug/foo.txt".to_string(), PathChange::Loaded),
        ],
        "All non-ignored entries and all opened firs should be getting a project event",
    );

    // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
    // This may happen multiple times during a single flycheck, but once is enough for testing.
    fs.emit_fs_event("/root/target/debug/deps", None);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        Vec::new(),
        "No further project events should happen, as only ignored dirs received FS events",
    );
}
10618
// Verifies that adding an invisible (non-visible) worktree does not cause its
// enclosing repository to be registered: only the repository of the visible
// worktree should appear in `project.repositories`, before and after.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // `dep1` is a nested repository inside the outer `dir1` repository.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // The project's only visible worktree is rooted at the inner repo, `dep1`.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Create an invisible worktree for a file that lives in the outer repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The outer repo (`/root/dir1`) must still not be registered.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
10680
// Verifies git/ignore state tracking across a rescan: files ignored by an
// ancestor `.gitignore` (outside the repo), files ignored by the repo's own
// `.gitignore`, and newly added tracked files must each report the expected
// index status and `is_ignored` flag.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file_scan_exclusions so entries like `.git` are surfaced as
    // worktree entries rather than being excluded from the scan entirely.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // Note: the outer `.gitignore` is an ancestor of the repo root `tree/`.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored dir's contents to be scanned so its entries exist.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        // Tracked file, identical to index: no status, not ignored.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        // Ignored by the *ancestor* .gitignore, which lies outside the repo,
        // so git itself doesn't ignore it here.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        // Ignored by the repo's own .gitignore: no status, flagged ignored.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Now create new files in each category and stage one of them.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // Newly staged file shows up as Added in the index.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // `.git` itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10821
// Verifies that linked git worktrees (`.git` file pointing into
// `.git/worktrees/...`) and submodules (`.git` file pointing into
// `.git/modules/...`) are each discovered as their own repository, and that
// git state changes in them are picked up and reflected in file statuses.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be discovered: the main repo, the linked
    // worktree, and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            // HEAD/index say "b" while the file on disk says "B", so the file
            // becomes modified in the working tree.
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        // The buffer must resolve to the linked worktree's repository,
        // not the main one.
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        // Barrier ensures pending repository work is done before asserting.
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
10977
10978#[gpui::test]
10979async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
10980 init_test(cx);
10981 let fs = FakeFs::new(cx.background_executor.clone());
10982 fs.insert_tree(
10983 path!("/root"),
10984 json!({
10985 "project": {
10986 ".git": {},
10987 "child1": {
10988 "a.txt": "A",
10989 },
10990 "child2": {
10991 "b.txt": "B",
10992 }
10993 }
10994 }),
10995 )
10996 .await;
10997
10998 let project = Project::test(
10999 fs.clone(),
11000 [
11001 path!("/root/project/child1").as_ref(),
11002 path!("/root/project/child2").as_ref(),
11003 ],
11004 cx,
11005 )
11006 .await;
11007
11008 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11009 tree.flush_fs_events(cx).await;
11010 project
11011 .update(cx, |project, cx| project.git_scans_complete(cx))
11012 .await;
11013 cx.executor().run_until_parked();
11014
11015 let repos = project.read_with(cx, |project, cx| {
11016 project
11017 .repositories(cx)
11018 .values()
11019 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
11020 .collect::<Vec<_>>()
11021 });
11022 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
11023}
11024
// Verifies that saving a buffer under a new path (save-as) updates its open
// unstaged/uncommitted diffs to compare against the *new* path's staged and
// committed contents, via the `BufferChangedFilePath` event.
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct committed/staged contents per file so assertions can tell
    // exactly which base text a diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Make the buffer differ from every base text so hunks are produced.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // A freshly opened uncommitted diff must likewise use file_2's HEAD text.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11138
11139async fn search(
11140 project: &Entity<Project>,
11141 query: SearchQuery,
11142 cx: &mut gpui::TestAppContext,
11143) -> Result<HashMap<String, Vec<Range<usize>>>> {
11144 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11145 let mut results = HashMap::default();
11146 while let Ok(search_result) = search_rx.rx.recv().await {
11147 match search_result {
11148 SearchResult::Buffer { buffer, ranges } => {
11149 results.entry(buffer).or_insert(ranges);
11150 }
11151 SearchResult::LimitReached => {}
11152 }
11153 }
11154 Ok(results
11155 .into_iter()
11156 .map(|(buffer, ranges)| {
11157 buffer.update(cx, |buffer, cx| {
11158 let path = buffer
11159 .file()
11160 .unwrap()
11161 .full_path(cx)
11162 .to_string_lossy()
11163 .to_string();
11164 let ranges = ranges
11165 .into_iter()
11166 .map(|range| range.to_offset(buffer))
11167 .collect::<Vec<_>>();
11168 (path, ranges)
11169 })
11170 })
11171 .collect())
11172}
11173
// Verifies that reloading a buffer with a different encoding is an undoable
// operation: undo restores the original encoding/text, redo reapplies the
// reinterpretation, and the buffer stays non-dirty throughout (contents always
// match disk under some interpretation).
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    // 0x48 0x69 little-endian is U+6948.
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    // Undo restores both the UTF-8 encoding and the original text.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    // Redo reapplies the UTF-16LE reinterpretation.
    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11237
11238pub fn init_test(cx: &mut gpui::TestAppContext) {
11239 zlog::init_test();
11240
11241 cx.update(|cx| {
11242 let settings_store = SettingsStore::test(cx);
11243 cx.set_global(settings_store);
11244 release_channel::init(semver::Version::new(0, 0, 0), cx);
11245 });
11246}
11247
11248fn json_lang() -> Arc<Language> {
11249 Arc::new(Language::new(
11250 LanguageConfig {
11251 name: "JSON".into(),
11252 matcher: LanguageMatcher {
11253 path_suffixes: vec!["json".to_string()],
11254 ..Default::default()
11255 },
11256 ..Default::default()
11257 },
11258 None,
11259 ))
11260}
11261
11262fn js_lang() -> Arc<Language> {
11263 Arc::new(Language::new(
11264 LanguageConfig {
11265 name: "JavaScript".into(),
11266 matcher: LanguageMatcher {
11267 path_suffixes: vec!["js".to_string()],
11268 ..Default::default()
11269 },
11270 ..Default::default()
11271 },
11272 None,
11273 ))
11274}
11275
/// Builds a fake "Python" language (no grammar) whose toolchain lister reports
/// a venv for every `.venv` directory found in the ancestors of the queried
/// subroot, as seen through the given `FakeFs`.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Test-only lister: scans ancestor directories for `.venv` instead of
    // running any real interpreter discovery.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                // Existence is checked against the fake filesystem, not the real one.
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is intentionally unimplemented; tests only use `list`.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation needed for the fake toolchains.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
11350
11351fn typescript_lang() -> Arc<Language> {
11352 Arc::new(Language::new(
11353 LanguageConfig {
11354 name: "TypeScript".into(),
11355 matcher: LanguageMatcher {
11356 path_suffixes: vec!["ts".to_string()],
11357 ..Default::default()
11358 },
11359 ..Default::default()
11360 },
11361 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
11362 ))
11363}
11364
11365fn tsx_lang() -> Arc<Language> {
11366 Arc::new(Language::new(
11367 LanguageConfig {
11368 name: "tsx".into(),
11369 matcher: LanguageMatcher {
11370 path_suffixes: vec!["tsx".to_string()],
11371 ..Default::default()
11372 },
11373 ..Default::default()
11374 },
11375 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
11376 ))
11377}
11378
11379fn get_all_tasks(
11380 project: &Entity<Project>,
11381 task_contexts: Arc<TaskContexts>,
11382 cx: &mut App,
11383) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
11384 let new_tasks = project.update(cx, |project, cx| {
11385 project.task_store().update(cx, |task_store, cx| {
11386 task_store.task_inventory().unwrap().update(cx, |this, cx| {
11387 this.used_and_current_resolved_tasks(task_contexts, cx)
11388 })
11389 })
11390 });
11391
11392 cx.background_spawn(async move {
11393 let (mut old, new) = new_tasks.await;
11394 old.extend(new);
11395 old
11396 })
11397}
11398
11399#[track_caller]
11400fn assert_entry_git_state(
11401 tree: &Worktree,
11402 repository: &Repository,
11403 path: &str,
11404 index_status: Option<StatusCode>,
11405 is_ignored: bool,
11406) {
11407 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
11408 let entry = tree
11409 .entry_for_path(&rel_path(path))
11410 .unwrap_or_else(|| panic!("entry {path} not found"));
11411 let status = repository
11412 .status_for_path(&repo_path(path))
11413 .map(|entry| entry.status);
11414 let expected = index_status.map(|index_status| {
11415 TrackedStatus {
11416 index_status,
11417 worktree_status: StatusCode::Unmodified,
11418 }
11419 .into()
11420 });
11421 assert_eq!(
11422 status, expected,
11423 "expected {path} to have git status: {expected:?}"
11424 );
11425 assert_eq!(
11426 entry.is_ignored, is_ignored,
11427 "expected {path} to have is_ignored: {is_ignored}"
11428 );
11429}
11430
11431#[track_caller]
11432fn git_init(path: &Path) -> git2::Repository {
11433 let mut init_opts = RepositoryInitOptions::new();
11434 init_opts.initial_head("main");
11435 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
11436}
11437
11438#[track_caller]
11439fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
11440 let path = path.as_ref();
11441 let mut index = repo.index().expect("Failed to get index");
11442 index.add_path(path).expect("Failed to add file");
11443 index.write().expect("Failed to write index");
11444}
11445
11446#[track_caller]
11447fn git_remove_index(path: &Path, repo: &git2::Repository) {
11448 let mut index = repo.index().expect("Failed to get index");
11449 index.remove_path(path).expect("Failed to add file");
11450 index.write().expect("Failed to write index");
11451}
11452
11453#[track_caller]
11454fn git_commit(msg: &'static str, repo: &git2::Repository) {
11455 use git2::Signature;
11456
11457 let signature = Signature::now("test", "test@zed.dev").unwrap();
11458 let oid = repo.index().unwrap().write_tree().unwrap();
11459 let tree = repo.find_tree(oid).unwrap();
11460 if let Ok(head) = repo.head() {
11461 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
11462
11463 let parent_commit = parent_obj.as_commit().unwrap();
11464
11465 repo.commit(
11466 Some("HEAD"),
11467 &signature,
11468 &signature,
11469 msg,
11470 &tree,
11471 &[parent_commit],
11472 )
11473 .expect("Failed to commit with parent");
11474 } else {
11475 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
11476 .expect("Failed to commit");
11477 }
11478}
11479
// Cherry-picks `commit` onto the current HEAD.
// NOTE: `#[cfg(any())]` is never true, so this helper is currently compiled
// out; it is kept for future tests that need cherry-pick behavior.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
11485
11486#[track_caller]
11487fn git_stash(repo: &mut git2::Repository) {
11488 use git2::Signature;
11489
11490 let signature = Signature::now("test", "test@zed.dev").unwrap();
11491 repo.stash_save(&signature, "N/A", None)
11492 .expect("Failed to stash");
11493}
11494
11495#[track_caller]
11496fn git_reset(offset: usize, repo: &git2::Repository) {
11497 let head = repo.head().expect("Couldn't get repo head");
11498 let object = head.peel(git2::ObjectType::Commit).unwrap();
11499 let commit = object.as_commit().unwrap();
11500 let new_head = commit
11501 .parents()
11502 .inspect(|parnet| {
11503 parnet.message();
11504 })
11505 .nth(offset)
11506 .expect("Not enough history");
11507 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
11508 .expect("Could not reset");
11509}
11510
// Creates branch `name` pointing at the current HEAD commit.
// NOTE: `#[cfg(any())]` is never true, so this helper is currently compiled out.
// NOTE(review): the expect message "Failed to commit" looks copy-pasted —
// "Failed to create branch" would be accurate if this is ever re-enabled.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    repo.branch(name, &head, false).expect("Failed to commit");
}
11521
// Points HEAD at the ref named `name` and checks out its tree.
// NOTE: `#[cfg(any())]` is never true, so this helper is currently compiled out.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
11528
// Collects the repository's per-path git statuses into a map keyed by path.
// NOTE: `#[cfg(any())]` is never true, so this helper is currently compiled out.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}
11538
/// Verifies that `Project::find_project_path` resolves absolute paths to the
/// correct worktree id and worktree-relative path, that nonexistent files
/// under a worktree still resolve, and that paths outside every worktree
/// resolve to `None`.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two sibling worktrees rooted at project1 and project2.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root path and id for assertions below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Existing file in the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested file resolves to a multi-component relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // File in the second worktree resolves to that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A file that doesn't exist yet, but lies under a worktree, still resolves.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
11622
/// Verifies that removing worktrees updates the git store correctly:
/// repositories for removed worktrees disappear, and the active repository
/// falls back to the next available one (or `None` when no worktrees remain).
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Three worktrees: `a` and `b` are repo roots; `b/script` lies inside
    // repo `b` without being a repo root itself.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    // Index worktree ids by their absolute paths for lookup below.
    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Two repositories total: one for `a`, one for `b`.
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the nested `b/script` worktree must not drop repo `b`,
    // since the `b` worktree still covers it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing worktree `a` should switch the active repository to `b`.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the last worktree leaves no active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
11735
/// Verifies the optimistic staging UI state machine for diff hunks: a hunk
/// starts unstaged, transitions through `SecondaryHunkRemovalPending` while a
/// stage operation is in flight, settles at `NoSecondaryHunk` once staging
/// completes, and disappears entirely after HEAD catches up (commit).
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index both hold the committed contents, so the on-disk edit
    // ("two" -> "TWO") is an unstaged modification.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // The in-flight stage shows as an optimistic "removal pending" state.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
11880
/// Verifies that buffers whose paths match a `read_only_files` glob (either a
/// directory glob or a filename-suffix glob) open as read-only, while
/// non-matching files stay writable.
#[gpui::test]
async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Configure read_only_files setting
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/generated/**".to_string(),
                    "**/*.gen.rs".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
                "types.gen.rs": "// Generated file",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Open a regular file - should be read-write
    let regular_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    regular_buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Regular file should not be read-only");
    });

    // Open a file matching *.gen.rs pattern - should be read-only
    let gen_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
        })
        .await
        .unwrap();

    gen_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File matching *.gen.rs pattern should be read-only"
        );
    });

    // Open a file in generated directory - should be read-only
    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File in generated directory should be read-only"
        );
    });
}
11956
11957#[gpui::test]
11958async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
11959 init_test(cx);
11960
11961 // Explicitly set read_only_files to empty (default behavior)
11962 cx.update(|cx| {
11963 cx.update_global::<SettingsStore, _>(|store, cx| {
11964 store.update_user_settings(cx, |settings| {
11965 settings.project.worktree.read_only_files = Some(vec![]);
11966 });
11967 });
11968 });
11969
11970 let fs = FakeFs::new(cx.background_executor.clone());
11971 fs.insert_tree(
11972 path!("/root"),
11973 json!({
11974 "src": {
11975 "main.rs": "fn main() {}",
11976 },
11977 "generated": {
11978 "schema.rs": "// Auto-generated schema",
11979 }
11980 }),
11981 )
11982 .await;
11983
11984 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11985
11986 // All files should be read-write when read_only_files is empty
11987 let main_buffer = project
11988 .update(cx, |project, cx| {
11989 project.open_local_buffer(path!("/root/src/main.rs"), cx)
11990 })
11991 .await
11992 .unwrap();
11993
11994 main_buffer.read_with(cx, |buffer, _| {
11995 assert!(
11996 !buffer.read_only(),
11997 "Files should not be read-only when read_only_files is empty"
11998 );
11999 });
12000
12001 let generated_buffer = project
12002 .update(cx, |project, cx| {
12003 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12004 })
12005 .await
12006 .unwrap();
12007
12008 generated_buffer.read_with(cx, |buffer, _| {
12009 assert!(
12010 !buffer.read_only(),
12011 "Generated files should not be read-only when read_only_files is empty"
12012 );
12013 });
12014}
12015
12016#[gpui::test]
12017async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
12018 init_test(cx);
12019
12020 // Configure to make lock files read-only
12021 cx.update(|cx| {
12022 cx.update_global::<SettingsStore, _>(|store, cx| {
12023 store.update_user_settings(cx, |settings| {
12024 settings.project.worktree.read_only_files = Some(vec![
12025 "**/*.lock".to_string(),
12026 "**/package-lock.json".to_string(),
12027 ]);
12028 });
12029 });
12030 });
12031
12032 let fs = FakeFs::new(cx.background_executor.clone());
12033 fs.insert_tree(
12034 path!("/root"),
12035 json!({
12036 "Cargo.lock": "# Lock file",
12037 "Cargo.toml": "[package]",
12038 "package-lock.json": "{}",
12039 "package.json": "{}",
12040 }),
12041 )
12042 .await;
12043
12044 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12045
12046 // Cargo.lock should be read-only
12047 let cargo_lock = project
12048 .update(cx, |project, cx| {
12049 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
12050 })
12051 .await
12052 .unwrap();
12053
12054 cargo_lock.read_with(cx, |buffer, _| {
12055 assert!(buffer.read_only(), "Cargo.lock should be read-only");
12056 });
12057
12058 // Cargo.toml should be read-write
12059 let cargo_toml = project
12060 .update(cx, |project, cx| {
12061 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
12062 })
12063 .await
12064 .unwrap();
12065
12066 cargo_toml.read_with(cx, |buffer, _| {
12067 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
12068 });
12069
12070 // package-lock.json should be read-only
12071 let package_lock = project
12072 .update(cx, |project, cx| {
12073 project.open_local_buffer(path!("/root/package-lock.json"), cx)
12074 })
12075 .await
12076 .unwrap();
12077
12078 package_lock.read_with(cx, |buffer, _| {
12079 assert!(buffer.read_only(), "package-lock.json should be read-only");
12080 });
12081
12082 // package.json should be read-write
12083 let package_json = project
12084 .update(cx, |project, cx| {
12085 project.open_local_buffer(path!("/root/package.json"), cx)
12086 })
12087 .await
12088 .unwrap();
12089
12090 package_json.read_with(cx, |buffer, _| {
12091 assert!(!buffer.read_only(), "package.json should not be read-only");
12092 });
12093}
12094
12095mod disable_ai_settings_tests {
12096 use gpui::TestAppContext;
12097 use project::*;
12098 use settings::{Settings, SettingsStore};
12099
12100 #[gpui::test]
12101 async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
12102 cx.update(|cx| {
12103 settings::init(cx);
12104
12105 // Test 1: Default is false (AI enabled)
12106 assert!(
12107 !DisableAiSettings::get_global(cx).disable_ai,
12108 "Default should allow AI"
12109 );
12110 });
12111
12112 let disable_true = serde_json::json!({
12113 "disable_ai": true
12114 })
12115 .to_string();
12116 let disable_false = serde_json::json!({
12117 "disable_ai": false
12118 })
12119 .to_string();
12120
12121 cx.update_global::<SettingsStore, _>(|store, cx| {
12122 store.set_user_settings(&disable_false, cx).unwrap();
12123 store.set_global_settings(&disable_true, cx).unwrap();
12124 });
12125 cx.update(|cx| {
12126 assert!(
12127 DisableAiSettings::get_global(cx).disable_ai,
12128 "Local false cannot override global true"
12129 );
12130 });
12131
12132 cx.update_global::<SettingsStore, _>(|store, cx| {
12133 store.set_global_settings(&disable_false, cx).unwrap();
12134 store.set_user_settings(&disable_true, cx).unwrap();
12135 });
12136
12137 cx.update(|cx| {
12138 assert!(
12139 DisableAiSettings::get_global(cx).disable_ai,
12140 "Local false cannot override global true"
12141 );
12142 });
12143 }
12144}