1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::FakeFs;
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
45 LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
46 ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
47 language_settings::{LanguageSettingsContent, language_settings},
48 markdown_lang, rust_lang, tree_sitter_typescript,
49};
50use lsp::{
51 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
52 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
53 Uri, WillRenameFiles, notification::DidRenameFiles,
54};
55use parking_lot::Mutex;
56use paths::{config_dir, global_gitignore_path, tasks_file};
57use postage::stream::Stream as _;
58use pretty_assertions::{assert_eq, assert_matches};
59use project::{
60 Event, TaskContexts,
61 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
62 search::{SearchQuery, SearchResult},
63 task_store::{TaskSettingsLocation, TaskStore},
64 *,
65};
66use rand::{Rng as _, rngs::StdRng};
67use serde_json::json;
68use settings::SettingsStore;
69#[cfg(not(windows))]
70use std::os;
71use std::{
72 cell::RefCell,
73 env, mem,
74 num::NonZeroU32,
75 ops::Range,
76 path::{Path, PathBuf},
77 rc::Rc,
78 str::FromStr,
79 sync::{Arc, OnceLock},
80 task::Poll,
81 time::Duration,
82};
83use sum_tree::SumTree;
84use task::{ResolvedTask, ShellKind, TaskContext};
85use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
86use unindent::Unindent as _;
87use util::{
88 TryFutureExt as _, assert_set_eq, maybe, path,
89 paths::{PathMatcher, PathStyle},
90 rel_path::{RelPath, rel_path},
91 test::{TempTree, marked_text_offsets},
92 uri,
93};
94use worktree::WorktreeModelHandle as _;
95
96#[gpui::test]
97async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
98 cx.executor().allow_parking();
99
100 let (tx, mut rx) = futures::channel::mpsc::unbounded();
101 let _thread = std::thread::spawn(move || {
102 #[cfg(not(target_os = "windows"))]
103 std::fs::metadata("/tmp").unwrap();
104 #[cfg(target_os = "windows")]
105 std::fs::metadata("C:/Windows").unwrap();
106 std::thread::sleep(Duration::from_millis(1000));
107 tx.unbounded_send(1).unwrap();
108 });
109 rx.next().await.unwrap();
110}
111
112#[gpui::test]
113async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
114 cx.executor().allow_parking();
115
116 let io_task = smol::unblock(move || {
117 println!("sleeping on thread {:?}", std::thread::current().id());
118 std::thread::sleep(Duration::from_millis(10));
119 1
120 });
121
122 let task = cx.foreground_executor().spawn(async move {
123 io_task.await;
124 });
125
126 task.await;
127}
128
129// NOTE:
130// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
131// we assume that they are not supported out of the box.
132#[cfg(not(windows))]
133#[gpui::test]
134async fn test_symlinks(cx: &mut gpui::TestAppContext) {
135 init_test(cx);
136 cx.executor().allow_parking();
137
138 let dir = TempTree::new(json!({
139 "root": {
140 "apple": "",
141 "banana": {
142 "carrot": {
143 "date": "",
144 "endive": "",
145 }
146 },
147 "fennel": {
148 "grape": "",
149 }
150 }
151 }));
152
153 let root_link_path = dir.path().join("root_link");
154 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
155 os::unix::fs::symlink(
156 dir.path().join("root/fennel"),
157 dir.path().join("root/finnochio"),
158 )
159 .unwrap();
160
161 let project = Project::test(
162 Arc::new(RealFs::new(None, cx.executor())),
163 [root_link_path.as_ref()],
164 cx,
165 )
166 .await;
167
168 project.update(cx, |project, cx| {
169 let tree = project.worktrees(cx).next().unwrap().read(cx);
170 assert_eq!(tree.file_count(), 5);
171 assert_eq!(
172 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
173 tree.entry_for_path(rel_path("finnochio/grape"))
174 .unwrap()
175 .inode
176 );
177 });
178}
179
// Verifies that .editorconfig settings layer correctly over .zed/settings.json:
// .editorconfig wins where it specifies a value, nested .editorconfig files
// override ancestors, and "off"/unset values fall back to Zed settings.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture: root .editorconfig with [*.rs] and [*.js] sections, a
    // .zed/settings.json with conflicting values, and nested .editorconfig
    // overrides under b/ and d/.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
            indent_style = tab
            indent_size = 3
            end_of_line = lf
            insert_final_newline = true
            trim_trailing_whitespace = true
            max_line_length = 120
        [*.js]
            tab_width = 10
            max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n    A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
                indent_size = 2
                max_line_length = off,
            "#,
            "b.rs": "fn b() {\n  B\n}",
        },
        "c.js": "def c\n  C\nend",
        "d": {
            ".editorconfig": r#"
            [*.rs]
                indent_size = 1
            "#,
            "d.rs": "fn d() {\n D\n}",
        },
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the project watches it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the worktree scan and settings observation settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_d = settings_for("d/d.rs");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in subdirectory overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
        assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by the .editorconfig glob "*.rs",
        // so it keeps tab_size = 8 from .zed/settings.json.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
287
// Verifies that .editorconfig files in ancestor directories OUTSIDE the
// worktree are discovered and merged: parent and grandparent configs apply to
// files the worktree-internal config does not cover.
#[gpui::test]
async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "parent": {
                ".editorconfig": "[*.rs]\nindent_size = 2\n",
                "worktree": {
                    ".editorconfig": "[*.md]\nindent_size = 3\n",
                    "main.rs": "fn main() {}",
                    "README.md": "# README",
                    "other.txt": "other content",
                }
            }
        }),
    )
    .await;

    // The worktree root is two levels below the grandparent config.
    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    language_registry.add(markdown_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_rs = settings_for("main.rs");
        let settings_md = settings_for("README.md");
        let settings_txt = settings_for("other.txt");

        // main.rs gets indent_size = 2 from parent's external .editorconfig
        assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));

        // README.md gets indent_size = 3 from internal worktree .editorconfig
        assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));

        // other.txt gets indent_size = 4 from grandparent's external .editorconfig
        assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
    });
}
351
352#[gpui::test]
353async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
354 init_test(cx);
355
356 let fs = FakeFs::new(cx.executor());
357 fs.insert_tree(
358 path!("/worktree"),
359 json!({
360 ".editorconfig": "[*]\nindent_size = 99\n",
361 "src": {
362 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
363 "file.rs": "fn main() {}",
364 }
365 }),
366 )
367 .await;
368
369 let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;
370
371 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
372 language_registry.add(rust_lang());
373
374 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
375
376 cx.executor().run_until_parked();
377
378 cx.update(|cx| {
379 let tree = worktree.read(cx);
380 let file_entry = tree
381 .entry_for_path(rel_path("src/file.rs"))
382 .unwrap()
383 .clone();
384 let file = File::for_entry(file_entry, worktree.clone());
385 let file_language = project
386 .read(cx)
387 .languages()
388 .load_language_for_file_path(file.path.as_std_path());
389 let file_language = cx
390 .foreground_executor()
391 .block_on(file_language)
392 .expect("Failed to get file language");
393 let file = file as _;
394 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
395
396 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
397 });
398}
399
// Verifies that when the worktree's own .editorconfig declares `root = true`,
// external configs above the worktree are never consulted.
#[gpui::test]
async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "worktree": {
                ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
    });
}
445
// Verifies that a `root = true` config in an EXTERNAL ancestor (the parent)
// stops traversal there: the grandparent's config never applies, even though
// the worktree itself has no .editorconfig at all.
#[gpui::test]
async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "parent": {
                ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
                "worktree": {
                    "file.rs": "fn main() {}",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
493
// Verifies that a single external .editorconfig in a shared parent directory
// is applied to multiple sibling worktrees, each of which also has its own
// internal config.
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    // Open both siblings as separate worktrees of one project.
    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        for worktree in worktrees {
            let tree = worktree.read(cx);
            let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            let settings =
                language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        }
    });
}
555
// Verifies the gating behavior: external (outside-the-worktree) .editorconfig
// files are only discovered when the worktree contains at least one internal
// .editorconfig. With none present, the parent's config is ignored entirely.
#[gpui::test]
async fn test_external_editorconfig_not_loaded_without_internal_config(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "worktree": {
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
        // because without an internal .editorconfig, external configs are not loaded
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
603
// Verifies that editing an external .editorconfig on disk (outside the
// worktree, but watched because the worktree has an internal config) triggers
// a settings refresh in the open project.
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                // Empty internal config: required so external configs are discovered.
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the external config on disk; the watcher should pick this up.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
677
// Verifies that a worktree added to an already-open project also discovers
// external .editorconfig files in its ancestor directories.
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                // Empty internal configs gate external-config discovery on.
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    // Start with only one of the two sibling directories as a worktree.
    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktree = project.read(cx).worktrees(cx).next().unwrap();
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Add the sibling directory as a second worktree after the fact.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = new_worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, new_worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
754
// Verifies that removing a worktree clears its editorconfig bookkeeping in the
// global SettingsStore: tracked worktree ids, cached external configs, and
// file watchers are all dropped.
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                // Internal config so the external one gets loaded and watched.
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        // test_state() exposes (tracked worktree ids, external config paths,
        // watched paths) for inspection.
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
810
// Verifies refcounted cleanup of a SHARED external .editorconfig: removing one
// of two worktrees that use it must keep the config and its watcher alive for
// the remaining worktree, and that worktree's settings must stay correct.
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    // Capture ids up front; worktree_b's handle is needed after removal of a.
    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    cx.update(|cx| {
        let tree = worktree_b.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree_b.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
908
// Verifies that the "git_hosting_providers" project setting registers a custom
// provider in the global GitHostingProviderRegistry, and that removing the
// setting unregisters it again.
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare a custom GitLab-flavored provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // The provider from settings should now be listed in the global registry.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the project settings on disk; the watcher should pick this up.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    // With the setting gone, the custom provider must be unregistered.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
973
974#[gpui::test]
975async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
976 init_test(cx);
977 TaskStore::init(None);
978
979 let fs = FakeFs::new(cx.executor());
980 fs.insert_tree(
981 path!("/dir"),
982 json!({
983 ".zed": {
984 "settings.json": r#"{ "tab_size": 8 }"#,
985 "tasks.json": r#"[{
986 "label": "cargo check all",
987 "command": "cargo",
988 "args": ["check", "--all"]
989 },]"#,
990 },
991 "a": {
992 "a.rs": "fn a() {\n A\n}"
993 },
994 "b": {
995 ".zed": {
996 "settings.json": r#"{ "tab_size": 2 }"#,
997 "tasks.json": r#"[{
998 "label": "cargo check",
999 "command": "cargo",
1000 "args": ["check"]
1001 },]"#,
1002 },
1003 "b.rs": "fn b() {\n B\n}"
1004 }
1005 }),
1006 )
1007 .await;
1008
1009 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1010 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1011
1012 cx.executor().run_until_parked();
1013 let worktree_id = cx.update(|cx| {
1014 project.update(cx, |project, cx| {
1015 project.worktrees(cx).next().unwrap().read(cx).id()
1016 })
1017 });
1018
1019 let mut task_contexts = TaskContexts::default();
1020 task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
1021 let task_contexts = Arc::new(task_contexts);
1022
1023 let topmost_local_task_source_kind = TaskSourceKind::Worktree {
1024 id: worktree_id,
1025 directory_in_worktree: rel_path(".zed").into(),
1026 id_base: "local worktree tasks from directory \".zed\"".into(),
1027 };
1028
1029 let all_tasks = cx
1030 .update(|cx| {
1031 let tree = worktree.read(cx);
1032
1033 let file_a = File::for_entry(
1034 tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
1035 worktree.clone(),
1036 ) as _;
1037 let settings_a = language_settings(None, Some(&file_a), cx);
1038 let file_b = File::for_entry(
1039 tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
1040 worktree.clone(),
1041 ) as _;
1042 let settings_b = language_settings(None, Some(&file_b), cx);
1043
1044 assert_eq!(settings_a.tab_size.get(), 8);
1045 assert_eq!(settings_b.tab_size.get(), 2);
1046
1047 get_all_tasks(&project, task_contexts.clone(), cx)
1048 })
1049 .await
1050 .into_iter()
1051 .map(|(source_kind, task)| {
1052 let resolved = task.resolved;
1053 (
1054 source_kind,
1055 task.resolved_label,
1056 resolved.args,
1057 resolved.env,
1058 )
1059 })
1060 .collect::<Vec<_>>();
1061 assert_eq!(
1062 all_tasks,
1063 vec![
1064 (
1065 TaskSourceKind::Worktree {
1066 id: worktree_id,
1067 directory_in_worktree: rel_path("b/.zed").into(),
1068 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1069 },
1070 "cargo check".to_string(),
1071 vec!["check".to_string()],
1072 HashMap::default(),
1073 ),
1074 (
1075 topmost_local_task_source_kind.clone(),
1076 "cargo check all".to_string(),
1077 vec!["check".to_string(), "--all".to_string()],
1078 HashMap::default(),
1079 ),
1080 ]
1081 );
1082
1083 let (_, resolved_task) = cx
1084 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1085 .await
1086 .into_iter()
1087 .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
1088 .expect("should have one global task");
1089 project.update(cx, |project, cx| {
1090 let task_inventory = project
1091 .task_store()
1092 .read(cx)
1093 .task_inventory()
1094 .cloned()
1095 .unwrap();
1096 task_inventory.update(cx, |inventory, _| {
1097 inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
1098 inventory
1099 .update_file_based_tasks(
1100 TaskSettingsLocation::Global(tasks_file()),
1101 Some(
1102 &json!([{
1103 "label": "cargo check unstable",
1104 "command": "cargo",
1105 "args": [
1106 "check",
1107 "--all",
1108 "--all-targets"
1109 ],
1110 "env": {
1111 "RUSTFLAGS": "-Zunstable-options"
1112 }
1113 }])
1114 .to_string(),
1115 ),
1116 )
1117 .unwrap();
1118 });
1119 });
1120 cx.run_until_parked();
1121
1122 let all_tasks = cx
1123 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1124 .await
1125 .into_iter()
1126 .map(|(source_kind, task)| {
1127 let resolved = task.resolved;
1128 (
1129 source_kind,
1130 task.resolved_label,
1131 resolved.args,
1132 resolved.env,
1133 )
1134 })
1135 .collect::<Vec<_>>();
1136 assert_eq!(
1137 all_tasks,
1138 vec![
1139 (
1140 topmost_local_task_source_kind.clone(),
1141 "cargo check all".to_string(),
1142 vec!["check".to_string(), "--all".to_string()],
1143 HashMap::default(),
1144 ),
1145 (
1146 TaskSourceKind::Worktree {
1147 id: worktree_id,
1148 directory_in_worktree: rel_path("b/.zed").into(),
1149 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1150 },
1151 "cargo check".to_string(),
1152 vec!["check".to_string()],
1153 HashMap::default(),
1154 ),
1155 (
1156 TaskSourceKind::AbsPath {
1157 abs_path: paths::tasks_file().clone(),
1158 id_base: "global tasks.json".into(),
1159 },
1160 "cargo check unstable".to_string(),
1161 vec![
1162 "check".to_string(),
1163 "--all".to_string(),
1164 "--all-targets".to_string(),
1165 ],
1166 HashMap::from_iter(Some((
1167 "RUSTFLAGS".to_string(),
1168 "-Zunstable-options".to_string()
1169 ))),
1170 ),
1171 ]
1172 );
1173}
1174
// Verifies that an unresolvable variable in a worktree-local `.zed/tasks.json`
// surfaces an `Event::Toast` whose link points at the tasks documentation.
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we have a chance to set up the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        // Only a toast that carries a documentation link matches; any other
        // project event is ignored.
        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    // Drain pending foreground work so the tasks.json reload (and the toast it
    // produces) has happened before we check the flag.
    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1232
1233#[gpui::test]
1234async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
1235 init_test(cx);
1236 TaskStore::init(None);
1237
1238 let fs = FakeFs::new(cx.executor());
1239 fs.insert_tree(
1240 path!("/dir"),
1241 json!({
1242 ".zed": {
1243 "tasks.json": r#"[{
1244 "label": "test worktree root",
1245 "command": "echo $ZED_WORKTREE_ROOT"
1246 }]"#,
1247 },
1248 "a": {
1249 "a.rs": "fn a() {\n A\n}"
1250 },
1251 }),
1252 )
1253 .await;
1254
1255 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1256 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1257
1258 cx.executor().run_until_parked();
1259 let worktree_id = cx.update(|cx| {
1260 project.update(cx, |project, cx| {
1261 project.worktrees(cx).next().unwrap().read(cx).id()
1262 })
1263 });
1264
1265 let active_non_worktree_item_tasks = cx
1266 .update(|cx| {
1267 get_all_tasks(
1268 &project,
1269 Arc::new(TaskContexts {
1270 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1271 active_worktree_context: None,
1272 other_worktree_contexts: Vec::new(),
1273 lsp_task_sources: HashMap::default(),
1274 latest_selection: None,
1275 }),
1276 cx,
1277 )
1278 })
1279 .await;
1280 assert!(
1281 active_non_worktree_item_tasks.is_empty(),
1282 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
1283 );
1284
1285 let active_worktree_tasks = cx
1286 .update(|cx| {
1287 get_all_tasks(
1288 &project,
1289 Arc::new(TaskContexts {
1290 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1291 active_worktree_context: Some((worktree_id, {
1292 let mut worktree_context = TaskContext::default();
1293 worktree_context
1294 .task_variables
1295 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
1296 worktree_context
1297 })),
1298 other_worktree_contexts: Vec::new(),
1299 lsp_task_sources: HashMap::default(),
1300 latest_selection: None,
1301 }),
1302 cx,
1303 )
1304 })
1305 .await;
1306 assert_eq!(
1307 active_worktree_tasks
1308 .into_iter()
1309 .map(|(source_kind, task)| {
1310 let resolved = task.resolved;
1311 (source_kind, resolved.command.unwrap())
1312 })
1313 .collect::<Vec<_>>(),
1314 vec![(
1315 TaskSourceKind::Worktree {
1316 id: worktree_id,
1317 directory_in_worktree: rel_path(".zed").into(),
1318 id_base: "local worktree tasks from directory \".zed\"".into(),
1319 },
1320 "echo /dir".to_string(),
1321 )]
1322 );
1323}
1324
// Two Python subprojects in one worktree initially share a single "ty" language
// server; activating a distinct toolchain for one subproject causes a second
// server instance (with a new `LanguageServerId`) to be started for it.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: a directory is a project root if it (or an
    // ancestor within `depth`) contains a `pyproject.toml` file.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Walk up at most `depth` ancestors, returning the first one that
            // directly contains a `pyproject.toml`.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two sibling subprojects, each with its own manifest and venv directory.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a Python buffer in project-a starts the first "ty" instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the same server instance…
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery roots at project-b's manifest directory.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    // Nothing has been activated yet, so no toolchain is active.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // …after toolchain activation, project-b's buffer must be served by a
    // fresh server instance rather than the shared one.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1526
// End-to-end lifecycle test for language-server management: servers start
// lazily when a matching buffer opens, receive open/change/save/close
// notifications only for buffers of their language, follow files across
// renames (including language changes), and reopen all relevant buffers after
// a restart. Note: every `receive_notification` await below is
// order-sensitive; the statement sequence must not be reordered.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Distinct completion trigger characters let us verify which server's
    // capabilities configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // Same language: the server sees a close of the old path and an open of the
    // new one, with the document version reset to 0.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (set comparison: the two open notifications may arrive in either order).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1929
1930#[gpui::test]
1931async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1932 init_test(cx);
1933
1934 let settings_json_contents = json!({
1935 "languages": {
1936 "Rust": {
1937 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1938 }
1939 },
1940 "lsp": {
1941 "my_fake_lsp": {
1942 "binary": {
1943 // file exists, so this is treated as a relative path
1944 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1945 }
1946 },
1947 "lsp_on_path": {
1948 "binary": {
1949 // file doesn't exist, so it will fall back on PATH env var
1950 "path": path!("lsp_on_path.exe").to_string(),
1951 }
1952 }
1953 },
1954 });
1955
1956 let fs = FakeFs::new(cx.executor());
1957 fs.insert_tree(
1958 path!("/the-root"),
1959 json!({
1960 ".zed": {
1961 "settings.json": settings_json_contents.to_string(),
1962 },
1963 ".relative_path": {
1964 "to": {
1965 "my_fake_lsp.exe": "",
1966 },
1967 },
1968 "src": {
1969 "main.rs": "",
1970 }
1971 }),
1972 )
1973 .await;
1974
1975 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1976 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1977 language_registry.add(rust_lang());
1978
1979 let mut my_fake_lsp = language_registry.register_fake_lsp(
1980 "Rust",
1981 FakeLspAdapter {
1982 name: "my_fake_lsp",
1983 ..Default::default()
1984 },
1985 );
1986 let mut lsp_on_path = language_registry.register_fake_lsp(
1987 "Rust",
1988 FakeLspAdapter {
1989 name: "lsp_on_path",
1990 ..Default::default()
1991 },
1992 );
1993
1994 cx.run_until_parked();
1995
1996 // Start the language server by opening a buffer with a compatible file extension.
1997 project
1998 .update(cx, |project, cx| {
1999 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
2000 })
2001 .await
2002 .unwrap();
2003
2004 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
2005 assert_eq!(
2006 lsp_path.to_string_lossy(),
2007 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
2008 );
2009
2010 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
2011 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
2012}
2013
2014#[gpui::test]
2015async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2016 init_test(cx);
2017
2018 let settings_json_contents = json!({
2019 "languages": {
2020 "Rust": {
2021 "language_servers": ["tilde_lsp"]
2022 }
2023 },
2024 "lsp": {
2025 "tilde_lsp": {
2026 "binary": {
2027 "path": "~/.local/bin/rust-analyzer",
2028 }
2029 }
2030 },
2031 });
2032
2033 let fs = FakeFs::new(cx.executor());
2034 fs.insert_tree(
2035 path!("/root"),
2036 json!({
2037 ".zed": {
2038 "settings.json": settings_json_contents.to_string(),
2039 },
2040 "src": {
2041 "main.rs": "fn main() {}",
2042 }
2043 }),
2044 )
2045 .await;
2046
2047 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2048 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2049 language_registry.add(rust_lang());
2050
2051 let mut tilde_lsp = language_registry.register_fake_lsp(
2052 "Rust",
2053 FakeLspAdapter {
2054 name: "tilde_lsp",
2055 ..Default::default()
2056 },
2057 );
2058 cx.run_until_parked();
2059
2060 project
2061 .update(cx, |project, cx| {
2062 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2063 })
2064 .await
2065 .unwrap();
2066
2067 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2068 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2069 assert_eq!(
2070 lsp_path, expected_path,
2071 "Tilde path should expand to home directory"
2072 );
2073}
2074
2075#[gpui::test]
2076async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2077 init_test(cx);
2078
2079 let fs = FakeFs::new(cx.executor());
2080 fs.insert_tree(
2081 path!("/the-root"),
2082 json!({
2083 ".gitignore": "target\n",
2084 "Cargo.lock": "",
2085 "src": {
2086 "a.rs": "",
2087 "b.rs": "",
2088 },
2089 "target": {
2090 "x": {
2091 "out": {
2092 "x.rs": ""
2093 }
2094 },
2095 "y": {
2096 "out": {
2097 "y.rs": "",
2098 }
2099 },
2100 "z": {
2101 "out": {
2102 "z.rs": ""
2103 }
2104 }
2105 }
2106 }),
2107 )
2108 .await;
2109 fs.insert_tree(
2110 path!("/the-registry"),
2111 json!({
2112 "dep1": {
2113 "src": {
2114 "dep1.rs": "",
2115 }
2116 },
2117 "dep2": {
2118 "src": {
2119 "dep2.rs": "",
2120 }
2121 },
2122 }),
2123 )
2124 .await;
2125 fs.insert_tree(
2126 path!("/the/stdlib"),
2127 json!({
2128 "LICENSE": "",
2129 "src": {
2130 "string.rs": "",
2131 }
2132 }),
2133 )
2134 .await;
2135
2136 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2137 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2138 (project.languages().clone(), project.lsp_store())
2139 });
2140 language_registry.add(rust_lang());
2141 let mut fake_servers = language_registry.register_fake_lsp(
2142 "Rust",
2143 FakeLspAdapter {
2144 name: "the-language-server",
2145 ..Default::default()
2146 },
2147 );
2148
2149 cx.executor().run_until_parked();
2150
2151 // Start the language server by opening a buffer with a compatible file extension.
2152 project
2153 .update(cx, |project, cx| {
2154 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2155 })
2156 .await
2157 .unwrap();
2158
2159 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2160 project.update(cx, |project, cx| {
2161 let worktree = project.worktrees(cx).next().unwrap();
2162 assert_eq!(
2163 worktree
2164 .read(cx)
2165 .snapshot()
2166 .entries(true, 0)
2167 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2168 .collect::<Vec<_>>(),
2169 &[
2170 ("", false),
2171 (".gitignore", false),
2172 ("Cargo.lock", false),
2173 ("src", false),
2174 ("src/a.rs", false),
2175 ("src/b.rs", false),
2176 ("target", true),
2177 ]
2178 );
2179 });
2180
2181 let prev_read_dir_count = fs.read_dir_call_count();
2182
2183 let fake_server = fake_servers.next().await.unwrap();
2184 cx.executor().run_until_parked();
2185 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2186 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2187 id
2188 });
2189
2190 // Simulate jumping to a definition in a dependency outside of the worktree.
2191 let _out_of_worktree_buffer = project
2192 .update(cx, |project, cx| {
2193 project.open_local_buffer_via_lsp(
2194 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2195 server_id,
2196 cx,
2197 )
2198 })
2199 .await
2200 .unwrap();
2201
2202 // Keep track of the FS events reported to the language server.
2203 let file_changes = Arc::new(Mutex::new(Vec::new()));
2204 fake_server
2205 .request::<lsp::request::RegisterCapability>(
2206 lsp::RegistrationParams {
2207 registrations: vec![lsp::Registration {
2208 id: Default::default(),
2209 method: "workspace/didChangeWatchedFiles".to_string(),
2210 register_options: serde_json::to_value(
2211 lsp::DidChangeWatchedFilesRegistrationOptions {
2212 watchers: vec![
2213 lsp::FileSystemWatcher {
2214 glob_pattern: lsp::GlobPattern::String(
2215 path!("/the-root/Cargo.toml").to_string(),
2216 ),
2217 kind: None,
2218 },
2219 lsp::FileSystemWatcher {
2220 glob_pattern: lsp::GlobPattern::String(
2221 path!("/the-root/src/*.{rs,c}").to_string(),
2222 ),
2223 kind: None,
2224 },
2225 lsp::FileSystemWatcher {
2226 glob_pattern: lsp::GlobPattern::String(
2227 path!("/the-root/target/y/**/*.rs").to_string(),
2228 ),
2229 kind: None,
2230 },
2231 lsp::FileSystemWatcher {
2232 glob_pattern: lsp::GlobPattern::String(
2233 path!("/the/stdlib/src/**/*.rs").to_string(),
2234 ),
2235 kind: None,
2236 },
2237 lsp::FileSystemWatcher {
2238 glob_pattern: lsp::GlobPattern::String(
2239 path!("**/Cargo.lock").to_string(),
2240 ),
2241 kind: None,
2242 },
2243 ],
2244 },
2245 )
2246 .ok(),
2247 }],
2248 },
2249 DEFAULT_LSP_REQUEST_TIMEOUT,
2250 )
2251 .await
2252 .into_response()
2253 .unwrap();
2254 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2255 let file_changes = file_changes.clone();
2256 move |params, _| {
2257 let mut file_changes = file_changes.lock();
2258 file_changes.extend(params.changes);
2259 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2260 }
2261 });
2262
2263 cx.executor().run_until_parked();
2264 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2265 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2266
2267 let mut new_watched_paths = fs.watched_paths();
2268 new_watched_paths.retain(|path| {
2269 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2270 });
2271 assert_eq!(
2272 &new_watched_paths,
2273 &[
2274 Path::new(path!("/the-root")),
2275 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2276 Path::new(path!("/the/stdlib/src"))
2277 ]
2278 );
2279
2280 // Now the language server has asked us to watch an ignored directory path,
2281 // so we recursively load it.
2282 project.update(cx, |project, cx| {
2283 let worktree = project.visible_worktrees(cx).next().unwrap();
2284 assert_eq!(
2285 worktree
2286 .read(cx)
2287 .snapshot()
2288 .entries(true, 0)
2289 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2290 .collect::<Vec<_>>(),
2291 &[
2292 ("", false),
2293 (".gitignore", false),
2294 ("Cargo.lock", false),
2295 ("src", false),
2296 ("src/a.rs", false),
2297 ("src/b.rs", false),
2298 ("target", true),
2299 ("target/x", true),
2300 ("target/y", true),
2301 ("target/y/out", true),
2302 ("target/y/out/y.rs", true),
2303 ("target/z", true),
2304 ]
2305 );
2306 });
2307
2308 // Perform some file system mutations, two of which match the watched patterns,
2309 // and one of which does not.
2310 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2311 .await
2312 .unwrap();
2313 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2314 .await
2315 .unwrap();
2316 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2317 .await
2318 .unwrap();
2319 fs.create_file(
2320 path!("/the-root/target/x/out/x2.rs").as_ref(),
2321 Default::default(),
2322 )
2323 .await
2324 .unwrap();
2325 fs.create_file(
2326 path!("/the-root/target/y/out/y2.rs").as_ref(),
2327 Default::default(),
2328 )
2329 .await
2330 .unwrap();
2331 fs.save(
2332 path!("/the-root/Cargo.lock").as_ref(),
2333 &"".into(),
2334 Default::default(),
2335 )
2336 .await
2337 .unwrap();
2338 fs.save(
2339 path!("/the-stdlib/LICENSE").as_ref(),
2340 &"".into(),
2341 Default::default(),
2342 )
2343 .await
2344 .unwrap();
2345 fs.save(
2346 path!("/the/stdlib/src/string.rs").as_ref(),
2347 &"".into(),
2348 Default::default(),
2349 )
2350 .await
2351 .unwrap();
2352
2353 // The language server receives events for the FS mutations that match its watch patterns.
2354 cx.executor().run_until_parked();
2355 assert_eq!(
2356 &*file_changes.lock(),
2357 &[
2358 lsp::FileEvent {
2359 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2360 typ: lsp::FileChangeType::CHANGED,
2361 },
2362 lsp::FileEvent {
2363 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2364 typ: lsp::FileChangeType::DELETED,
2365 },
2366 lsp::FileEvent {
2367 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2368 typ: lsp::FileChangeType::CREATED,
2369 },
2370 lsp::FileEvent {
2371 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2372 typ: lsp::FileChangeType::CREATED,
2373 },
2374 lsp::FileEvent {
2375 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2376 typ: lsp::FileChangeType::CHANGED,
2377 },
2378 ]
2379 );
2380}
2381
2382#[gpui::test]
2383async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2384 init_test(cx);
2385
2386 let fs = FakeFs::new(cx.executor());
2387 fs.insert_tree(
2388 path!("/dir"),
2389 json!({
2390 "a.rs": "let a = 1;",
2391 "b.rs": "let b = 2;"
2392 }),
2393 )
2394 .await;
2395
2396 let project = Project::test(
2397 fs,
2398 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2399 cx,
2400 )
2401 .await;
2402 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2403
2404 let buffer_a = project
2405 .update(cx, |project, cx| {
2406 project.open_local_buffer(path!("/dir/a.rs"), cx)
2407 })
2408 .await
2409 .unwrap();
2410 let buffer_b = project
2411 .update(cx, |project, cx| {
2412 project.open_local_buffer(path!("/dir/b.rs"), cx)
2413 })
2414 .await
2415 .unwrap();
2416
2417 lsp_store.update(cx, |lsp_store, cx| {
2418 lsp_store
2419 .update_diagnostics(
2420 LanguageServerId(0),
2421 lsp::PublishDiagnosticsParams {
2422 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2423 version: None,
2424 diagnostics: vec![lsp::Diagnostic {
2425 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2426 severity: Some(lsp::DiagnosticSeverity::ERROR),
2427 message: "error 1".to_string(),
2428 ..Default::default()
2429 }],
2430 },
2431 None,
2432 DiagnosticSourceKind::Pushed,
2433 &[],
2434 cx,
2435 )
2436 .unwrap();
2437 lsp_store
2438 .update_diagnostics(
2439 LanguageServerId(0),
2440 lsp::PublishDiagnosticsParams {
2441 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2442 version: None,
2443 diagnostics: vec![lsp::Diagnostic {
2444 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2445 severity: Some(DiagnosticSeverity::WARNING),
2446 message: "error 2".to_string(),
2447 ..Default::default()
2448 }],
2449 },
2450 None,
2451 DiagnosticSourceKind::Pushed,
2452 &[],
2453 cx,
2454 )
2455 .unwrap();
2456 });
2457
2458 buffer_a.update(cx, |buffer, _| {
2459 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2460 assert_eq!(
2461 chunks
2462 .iter()
2463 .map(|(s, d)| (s.as_str(), *d))
2464 .collect::<Vec<_>>(),
2465 &[
2466 ("let ", None),
2467 ("a", Some(DiagnosticSeverity::ERROR)),
2468 (" = 1;", None),
2469 ]
2470 );
2471 });
2472 buffer_b.update(cx, |buffer, _| {
2473 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2474 assert_eq!(
2475 chunks
2476 .iter()
2477 .map(|(s, d)| (s.as_str(), *d))
2478 .collect::<Vec<_>>(),
2479 &[
2480 ("let ", None),
2481 ("b", Some(DiagnosticSeverity::WARNING)),
2482 (" = 2;", None),
2483 ]
2484 );
2485 });
2486}
2487
2488#[gpui::test]
2489async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2490 init_test(cx);
2491
2492 let fs = FakeFs::new(cx.executor());
2493 fs.insert_tree(
2494 path!("/root"),
2495 json!({
2496 "dir": {
2497 ".git": {
2498 "HEAD": "ref: refs/heads/main",
2499 },
2500 ".gitignore": "b.rs",
2501 "a.rs": "let a = 1;",
2502 "b.rs": "let b = 2;",
2503 },
2504 "other.rs": "let b = c;"
2505 }),
2506 )
2507 .await;
2508
2509 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2510 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2511 let (worktree, _) = project
2512 .update(cx, |project, cx| {
2513 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2514 })
2515 .await
2516 .unwrap();
2517 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2518
2519 let (worktree, _) = project
2520 .update(cx, |project, cx| {
2521 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2522 })
2523 .await
2524 .unwrap();
2525 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2526
2527 let server_id = LanguageServerId(0);
2528 lsp_store.update(cx, |lsp_store, cx| {
2529 lsp_store
2530 .update_diagnostics(
2531 server_id,
2532 lsp::PublishDiagnosticsParams {
2533 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2534 version: None,
2535 diagnostics: vec![lsp::Diagnostic {
2536 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2537 severity: Some(lsp::DiagnosticSeverity::ERROR),
2538 message: "unused variable 'b'".to_string(),
2539 ..Default::default()
2540 }],
2541 },
2542 None,
2543 DiagnosticSourceKind::Pushed,
2544 &[],
2545 cx,
2546 )
2547 .unwrap();
2548 lsp_store
2549 .update_diagnostics(
2550 server_id,
2551 lsp::PublishDiagnosticsParams {
2552 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2553 version: None,
2554 diagnostics: vec![lsp::Diagnostic {
2555 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2556 severity: Some(lsp::DiagnosticSeverity::ERROR),
2557 message: "unknown variable 'c'".to_string(),
2558 ..Default::default()
2559 }],
2560 },
2561 None,
2562 DiagnosticSourceKind::Pushed,
2563 &[],
2564 cx,
2565 )
2566 .unwrap();
2567 });
2568
2569 let main_ignored_buffer = project
2570 .update(cx, |project, cx| {
2571 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2572 })
2573 .await
2574 .unwrap();
2575 main_ignored_buffer.update(cx, |buffer, _| {
2576 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2577 assert_eq!(
2578 chunks
2579 .iter()
2580 .map(|(s, d)| (s.as_str(), *d))
2581 .collect::<Vec<_>>(),
2582 &[
2583 ("let ", None),
2584 ("b", Some(DiagnosticSeverity::ERROR)),
2585 (" = 2;", None),
2586 ],
2587 "Gigitnored buffers should still get in-buffer diagnostics",
2588 );
2589 });
2590 let other_buffer = project
2591 .update(cx, |project, cx| {
2592 project.open_buffer((other_worktree_id, rel_path("")), cx)
2593 })
2594 .await
2595 .unwrap();
2596 other_buffer.update(cx, |buffer, _| {
2597 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2598 assert_eq!(
2599 chunks
2600 .iter()
2601 .map(|(s, d)| (s.as_str(), *d))
2602 .collect::<Vec<_>>(),
2603 &[
2604 ("let b = ", None),
2605 ("c", Some(DiagnosticSeverity::ERROR)),
2606 (";", None),
2607 ],
2608 "Buffers from hidden projects should still get in-buffer diagnostics"
2609 );
2610 });
2611
2612 project.update(cx, |project, cx| {
2613 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2614 assert_eq!(
2615 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2616 vec![(
2617 ProjectPath {
2618 worktree_id: main_worktree_id,
2619 path: rel_path("b.rs").into(),
2620 },
2621 server_id,
2622 DiagnosticSummary {
2623 error_count: 1,
2624 warning_count: 0,
2625 }
2626 )]
2627 );
2628 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2629 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2630 });
2631}
2632
// Verifies that a server's disk-based diagnostic progress (reported under its
// configured progress token) is surfaced as project events in order:
// DiskBasedDiagnosticsStarted -> DiagnosticsUpdated -> DiskBasedDiagnosticsFinished,
// and that republishing identical empty diagnostics emits no extra event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Token the fake server uses to mark progress as disk-based diagnostics.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Subscribe to project events before driving the server so none are missed.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning a progress task under the disk-based token emits a "started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics while the progress task runs emits a per-path
    // update event for the affected file.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the progress task emits a "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // The published diagnostic is visible once the buffer is opened.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second identical (empty) publish must produce no further events.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2768
// Verifies that restarting a language server while its disk-based diagnostics
// progress task is still running abandons the old server's task: the
// replacement server is tracked under a new id, and diagnostics are considered
// finished once the new server ends its own task, even though the old one
// never completed.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Opening the buffer starts the first language server (id 0).
    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed and the replacement (id 1) added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The already-open buffer is re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2870
2871#[gpui::test]
2872async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2873 init_test(cx);
2874
2875 let fs = FakeFs::new(cx.executor());
2876 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2877
2878 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2879
2880 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2881 language_registry.add(rust_lang());
2882 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2883
2884 let (buffer, _) = project
2885 .update(cx, |project, cx| {
2886 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2887 })
2888 .await
2889 .unwrap();
2890
2891 // Publish diagnostics
2892 let fake_server = fake_servers.next().await.unwrap();
2893 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2894 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2895 version: None,
2896 diagnostics: vec![lsp::Diagnostic {
2897 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2898 severity: Some(lsp::DiagnosticSeverity::ERROR),
2899 message: "the message".to_string(),
2900 ..Default::default()
2901 }],
2902 });
2903
2904 cx.executor().run_until_parked();
2905 buffer.update(cx, |buffer, _| {
2906 assert_eq!(
2907 buffer
2908 .snapshot()
2909 .diagnostics_in_range::<_, usize>(0..1, false)
2910 .map(|entry| entry.diagnostic.message.clone())
2911 .collect::<Vec<_>>(),
2912 ["the message".to_string()]
2913 );
2914 });
2915 project.update(cx, |project, cx| {
2916 assert_eq!(
2917 project.diagnostic_summary(false, cx),
2918 DiagnosticSummary {
2919 error_count: 1,
2920 warning_count: 0,
2921 }
2922 );
2923 });
2924
2925 project.update(cx, |project, cx| {
2926 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2927 });
2928
2929 // The diagnostics are cleared.
2930 cx.executor().run_until_parked();
2931 buffer.update(cx, |buffer, _| {
2932 assert_eq!(
2933 buffer
2934 .snapshot()
2935 .diagnostics_in_range::<_, usize>(0..1, false)
2936 .map(|entry| entry.diagnostic.message.clone())
2937 .collect::<Vec<_>>(),
2938 Vec::<String>::new(),
2939 );
2940 });
2941 project.update(cx, |project, cx| {
2942 assert_eq!(
2943 project.diagnostic_summary(false, cx),
2944 DiagnosticSummary {
2945 error_count: 0,
2946 warning_count: 0,
2947 }
2948 );
2949 });
2950}
2951
2952#[gpui::test]
2953async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2954 init_test(cx);
2955
2956 let fs = FakeFs::new(cx.executor());
2957 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2958
2959 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2960 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2961
2962 language_registry.add(rust_lang());
2963 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2964
2965 let (buffer, _handle) = project
2966 .update(cx, |project, cx| {
2967 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2968 })
2969 .await
2970 .unwrap();
2971
2972 // Before restarting the server, report diagnostics with an unknown buffer version.
2973 let fake_server = fake_servers.next().await.unwrap();
2974 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2975 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2976 version: Some(10000),
2977 diagnostics: Vec::new(),
2978 });
2979 cx.executor().run_until_parked();
2980 project.update(cx, |project, cx| {
2981 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2982 });
2983
2984 let mut fake_server = fake_servers.next().await.unwrap();
2985 let notification = fake_server
2986 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2987 .await
2988 .text_document;
2989 assert_eq!(notification.version, 0);
2990}
2991
// Verifies that cancelling language server work for a buffer sends a
// WorkDoneProgressCancel notification only for the progress task the server
// marked as cancellable, not for non-cancellable tasks.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First task is explicitly non-cancellable; cancellation must skip it.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second task is cancellable; this is the one that should be cancelled.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Only the cancellable task's token is sent back to the server.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3063
// Verifies that toggling `enable_language_server` in per-language settings
// stops and restarts only the matching server: disabling Rust exits the Rust
// server while leaving the JavaScript one running, and re-enabling Rust while
// disabling JavaScript swaps which server is alive.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the server for its language.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server is told to exit; the JS server is untouched.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance starts and re-opens the Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // The JavaScript server is shut down in turn.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3181
3182#[gpui::test(iterations = 3)]
3183async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3184 init_test(cx);
3185
3186 let text = "
3187 fn a() { A }
3188 fn b() { BB }
3189 fn c() { CCC }
3190 "
3191 .unindent();
3192
3193 let fs = FakeFs::new(cx.executor());
3194 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3195
3196 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3197 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3198
3199 language_registry.add(rust_lang());
3200 let mut fake_servers = language_registry.register_fake_lsp(
3201 "Rust",
3202 FakeLspAdapter {
3203 disk_based_diagnostics_sources: vec!["disk".into()],
3204 ..Default::default()
3205 },
3206 );
3207
3208 let buffer = project
3209 .update(cx, |project, cx| {
3210 project.open_local_buffer(path!("/dir/a.rs"), cx)
3211 })
3212 .await
3213 .unwrap();
3214
3215 let _handle = project.update(cx, |project, cx| {
3216 project.register_buffer_with_language_servers(&buffer, cx)
3217 });
3218
3219 let mut fake_server = fake_servers.next().await.unwrap();
3220 let open_notification = fake_server
3221 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3222 .await;
3223
3224 // Edit the buffer, moving the content down
3225 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3226 let change_notification_1 = fake_server
3227 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3228 .await;
3229 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3230
3231 // Report some diagnostics for the initial version of the buffer
3232 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3233 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3234 version: Some(open_notification.text_document.version),
3235 diagnostics: vec![
3236 lsp::Diagnostic {
3237 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3238 severity: Some(DiagnosticSeverity::ERROR),
3239 message: "undefined variable 'A'".to_string(),
3240 source: Some("disk".to_string()),
3241 ..Default::default()
3242 },
3243 lsp::Diagnostic {
3244 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3245 severity: Some(DiagnosticSeverity::ERROR),
3246 message: "undefined variable 'BB'".to_string(),
3247 source: Some("disk".to_string()),
3248 ..Default::default()
3249 },
3250 lsp::Diagnostic {
3251 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3252 severity: Some(DiagnosticSeverity::ERROR),
3253 source: Some("disk".to_string()),
3254 message: "undefined variable 'CCC'".to_string(),
3255 ..Default::default()
3256 },
3257 ],
3258 });
3259
3260 // The diagnostics have moved down since they were created.
3261 cx.executor().run_until_parked();
3262 buffer.update(cx, |buffer, _| {
3263 assert_eq!(
3264 buffer
3265 .snapshot()
3266 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3267 .collect::<Vec<_>>(),
3268 &[
3269 DiagnosticEntry {
3270 range: Point::new(3, 9)..Point::new(3, 11),
3271 diagnostic: Diagnostic {
3272 source: Some("disk".into()),
3273 severity: DiagnosticSeverity::ERROR,
3274 message: "undefined variable 'BB'".to_string(),
3275 is_disk_based: true,
3276 group_id: 1,
3277 is_primary: true,
3278 source_kind: DiagnosticSourceKind::Pushed,
3279 ..Diagnostic::default()
3280 },
3281 },
3282 DiagnosticEntry {
3283 range: Point::new(4, 9)..Point::new(4, 12),
3284 diagnostic: Diagnostic {
3285 source: Some("disk".into()),
3286 severity: DiagnosticSeverity::ERROR,
3287 message: "undefined variable 'CCC'".to_string(),
3288 is_disk_based: true,
3289 group_id: 2,
3290 is_primary: true,
3291 source_kind: DiagnosticSourceKind::Pushed,
3292 ..Diagnostic::default()
3293 }
3294 }
3295 ]
3296 );
3297 assert_eq!(
3298 chunks_with_diagnostics(buffer, 0..buffer.len()),
3299 [
3300 ("\n\nfn a() { ".to_string(), None),
3301 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3302 (" }\nfn b() { ".to_string(), None),
3303 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3304 (" }\nfn c() { ".to_string(), None),
3305 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3306 (" }\n".to_string(), None),
3307 ]
3308 );
3309 assert_eq!(
3310 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3311 [
3312 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3313 (" }\nfn c() { ".to_string(), None),
3314 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3315 ]
3316 );
3317 });
3318
3319 // Ensure overlapping diagnostics are highlighted correctly.
3320 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3321 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3322 version: Some(open_notification.text_document.version),
3323 diagnostics: vec![
3324 lsp::Diagnostic {
3325 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3326 severity: Some(DiagnosticSeverity::ERROR),
3327 message: "undefined variable 'A'".to_string(),
3328 source: Some("disk".to_string()),
3329 ..Default::default()
3330 },
3331 lsp::Diagnostic {
3332 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3333 severity: Some(DiagnosticSeverity::WARNING),
3334 message: "unreachable statement".to_string(),
3335 source: Some("disk".to_string()),
3336 ..Default::default()
3337 },
3338 ],
3339 });
3340
3341 cx.executor().run_until_parked();
3342 buffer.update(cx, |buffer, _| {
3343 assert_eq!(
3344 buffer
3345 .snapshot()
3346 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3347 .collect::<Vec<_>>(),
3348 &[
3349 DiagnosticEntry {
3350 range: Point::new(2, 9)..Point::new(2, 12),
3351 diagnostic: Diagnostic {
3352 source: Some("disk".into()),
3353 severity: DiagnosticSeverity::WARNING,
3354 message: "unreachable statement".to_string(),
3355 is_disk_based: true,
3356 group_id: 4,
3357 is_primary: true,
3358 source_kind: DiagnosticSourceKind::Pushed,
3359 ..Diagnostic::default()
3360 }
3361 },
3362 DiagnosticEntry {
3363 range: Point::new(2, 9)..Point::new(2, 10),
3364 diagnostic: Diagnostic {
3365 source: Some("disk".into()),
3366 severity: DiagnosticSeverity::ERROR,
3367 message: "undefined variable 'A'".to_string(),
3368 is_disk_based: true,
3369 group_id: 3,
3370 is_primary: true,
3371 source_kind: DiagnosticSourceKind::Pushed,
3372 ..Diagnostic::default()
3373 },
3374 }
3375 ]
3376 );
3377 assert_eq!(
3378 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3379 [
3380 ("fn a() { ".to_string(), None),
3381 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3382 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3383 ("\n".to_string(), None),
3384 ]
3385 );
3386 assert_eq!(
3387 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3388 [
3389 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3390 ("\n".to_string(), None),
3391 ]
3392 );
3393 });
3394
3395 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3396 // changes since the last save.
3397 buffer.update(cx, |buffer, cx| {
3398 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3399 buffer.edit(
3400 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3401 None,
3402 cx,
3403 );
3404 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3405 });
3406 let change_notification_2 = fake_server
3407 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3408 .await;
3409 assert!(
3410 change_notification_2.text_document.version > change_notification_1.text_document.version
3411 );
3412
3413 // Handle out-of-order diagnostics
3414 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3415 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3416 version: Some(change_notification_2.text_document.version),
3417 diagnostics: vec![
3418 lsp::Diagnostic {
3419 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3420 severity: Some(DiagnosticSeverity::ERROR),
3421 message: "undefined variable 'BB'".to_string(),
3422 source: Some("disk".to_string()),
3423 ..Default::default()
3424 },
3425 lsp::Diagnostic {
3426 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3427 severity: Some(DiagnosticSeverity::WARNING),
3428 message: "undefined variable 'A'".to_string(),
3429 source: Some("disk".to_string()),
3430 ..Default::default()
3431 },
3432 ],
3433 });
3434
3435 cx.executor().run_until_parked();
3436 buffer.update(cx, |buffer, _| {
3437 assert_eq!(
3438 buffer
3439 .snapshot()
3440 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3441 .collect::<Vec<_>>(),
3442 &[
3443 DiagnosticEntry {
3444 range: Point::new(2, 21)..Point::new(2, 22),
3445 diagnostic: Diagnostic {
3446 source: Some("disk".into()),
3447 severity: DiagnosticSeverity::WARNING,
3448 message: "undefined variable 'A'".to_string(),
3449 is_disk_based: true,
3450 group_id: 6,
3451 is_primary: true,
3452 source_kind: DiagnosticSourceKind::Pushed,
3453 ..Diagnostic::default()
3454 }
3455 },
3456 DiagnosticEntry {
3457 range: Point::new(3, 9)..Point::new(3, 14),
3458 diagnostic: Diagnostic {
3459 source: Some("disk".into()),
3460 severity: DiagnosticSeverity::ERROR,
3461 message: "undefined variable 'BB'".to_string(),
3462 is_disk_based: true,
3463 group_id: 5,
3464 is_primary: true,
3465 source_kind: DiagnosticSourceKind::Pushed,
3466 ..Diagnostic::default()
3467 },
3468 }
3469 ]
3470 );
3471 });
3472}
3473
3474#[gpui::test]
3475async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
3476 init_test(cx);
3477
3478 let text = concat!(
3479 "let one = ;\n", //
3480 "let two = \n",
3481 "let three = 3;\n",
3482 );
3483
3484 let fs = FakeFs::new(cx.executor());
3485 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3486
3487 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3488 let buffer = project
3489 .update(cx, |project, cx| {
3490 project.open_local_buffer(path!("/dir/a.rs"), cx)
3491 })
3492 .await
3493 .unwrap();
3494
3495 project.update(cx, |project, cx| {
3496 project.lsp_store().update(cx, |lsp_store, cx| {
3497 lsp_store
3498 .update_diagnostic_entries(
3499 LanguageServerId(0),
3500 PathBuf::from(path!("/dir/a.rs")),
3501 None,
3502 None,
3503 vec![
3504 DiagnosticEntry {
3505 range: Unclipped(PointUtf16::new(0, 10))
3506 ..Unclipped(PointUtf16::new(0, 10)),
3507 diagnostic: Diagnostic {
3508 severity: DiagnosticSeverity::ERROR,
3509 message: "syntax error 1".to_string(),
3510 source_kind: DiagnosticSourceKind::Pushed,
3511 ..Diagnostic::default()
3512 },
3513 },
3514 DiagnosticEntry {
3515 range: Unclipped(PointUtf16::new(1, 10))
3516 ..Unclipped(PointUtf16::new(1, 10)),
3517 diagnostic: Diagnostic {
3518 severity: DiagnosticSeverity::ERROR,
3519 message: "syntax error 2".to_string(),
3520 source_kind: DiagnosticSourceKind::Pushed,
3521 ..Diagnostic::default()
3522 },
3523 },
3524 ],
3525 cx,
3526 )
3527 .unwrap();
3528 })
3529 });
3530
3531 // An empty range is extended forward to include the following character.
3532 // At the end of a line, an empty range is extended backward to include
3533 // the preceding character.
3534 buffer.update(cx, |buffer, _| {
3535 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
3536 assert_eq!(
3537 chunks
3538 .iter()
3539 .map(|(s, d)| (s.as_str(), *d))
3540 .collect::<Vec<_>>(),
3541 &[
3542 ("let one = ", None),
3543 (";", Some(DiagnosticSeverity::ERROR)),
3544 ("\nlet two =", None),
3545 (" ", Some(DiagnosticSeverity::ERROR)),
3546 ("\nlet three = 3;\n", None)
3547 ]
3548 );
3549 });
3550}
3551
3552#[gpui::test]
3553async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3554 init_test(cx);
3555
3556 let fs = FakeFs::new(cx.executor());
3557 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3558 .await;
3559
3560 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3561 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3562
3563 lsp_store.update(cx, |lsp_store, cx| {
3564 lsp_store
3565 .update_diagnostic_entries(
3566 LanguageServerId(0),
3567 Path::new(path!("/dir/a.rs")).to_owned(),
3568 None,
3569 None,
3570 vec![DiagnosticEntry {
3571 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3572 diagnostic: Diagnostic {
3573 severity: DiagnosticSeverity::ERROR,
3574 is_primary: true,
3575 message: "syntax error a1".to_string(),
3576 source_kind: DiagnosticSourceKind::Pushed,
3577 ..Diagnostic::default()
3578 },
3579 }],
3580 cx,
3581 )
3582 .unwrap();
3583 lsp_store
3584 .update_diagnostic_entries(
3585 LanguageServerId(1),
3586 Path::new(path!("/dir/a.rs")).to_owned(),
3587 None,
3588 None,
3589 vec![DiagnosticEntry {
3590 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3591 diagnostic: Diagnostic {
3592 severity: DiagnosticSeverity::ERROR,
3593 is_primary: true,
3594 message: "syntax error b1".to_string(),
3595 source_kind: DiagnosticSourceKind::Pushed,
3596 ..Diagnostic::default()
3597 },
3598 }],
3599 cx,
3600 )
3601 .unwrap();
3602
3603 assert_eq!(
3604 lsp_store.diagnostic_summary(false, cx),
3605 DiagnosticSummary {
3606 error_count: 2,
3607 warning_count: 0,
3608 }
3609 );
3610 });
3611}
3612
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP edits tagged with an older document version are
    // transformed through the buffer edits made since that version, so they
    // land on the intended content rather than at stale coordinates.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw on open; the edits below
    // will be issued against this soon-to-be-stale version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Resolve the server's edits (expressed against `lsp_document_version`,
    // i.e. the original unedited text) into current-buffer coordinates.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits must preserve both the server's changes
    // and the user's intervening edits (the three inserted comments).
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3767
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // A server may express a small change as a large diff that rewrites most
    // of the file. This checks that such edits are reduced to the actual
    // changed ranges before being applied to the buffer.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large diff collapses to two minimal edits: rewriting the first
        // `use` statement, and deleting the now-redundant second line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3878
3879#[gpui::test]
3880async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
3881 cx: &mut gpui::TestAppContext,
3882) {
3883 init_test(cx);
3884
3885 let text = "Path()";
3886
3887 let fs = FakeFs::new(cx.executor());
3888 fs.insert_tree(
3889 path!("/dir"),
3890 json!({
3891 "a.rs": text
3892 }),
3893 )
3894 .await;
3895
3896 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3897 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3898 let buffer = project
3899 .update(cx, |project, cx| {
3900 project.open_local_buffer(path!("/dir/a.rs"), cx)
3901 })
3902 .await
3903 .unwrap();
3904
3905 // Simulate the language server sending us a pair of edits at the same location,
3906 // with an insertion following a replacement (which violates the LSP spec).
3907 let edits = lsp_store
3908 .update(cx, |lsp_store, cx| {
3909 lsp_store.as_local_mut().unwrap().edits_from_lsp(
3910 &buffer,
3911 [
3912 lsp::TextEdit {
3913 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
3914 new_text: "Path".into(),
3915 },
3916 lsp::TextEdit {
3917 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3918 new_text: "from path import Path\n\n\n".into(),
3919 },
3920 ],
3921 LanguageServerId(0),
3922 None,
3923 cx,
3924 )
3925 })
3926 .await
3927 .unwrap();
3928
3929 buffer.update(cx, |buffer, cx| {
3930 buffer.edit(edits, None, cx);
3931 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
3932 });
3933}
3934
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    // Servers may send edits out of order, with inverted ranges, or with
    // positions beyond the end of the file. This checks that such edits are
    // still resolved to the same minimal pair of changes as the well-formed
    // equivalent in the previous test.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start column 8, end column 4.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position far past the end of the 8-line file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the resolved edits are the same
        // minimal pair as for the well-formed diff.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4041
4042fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4043 buffer: &Buffer,
4044 range: Range<T>,
4045) -> Vec<(String, Option<DiagnosticSeverity>)> {
4046 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4047 for chunk in buffer.snapshot().chunks(range, true) {
4048 if chunks
4049 .last()
4050 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4051 {
4052 chunks.last_mut().unwrap().0.push_str(chunk.text);
4053 } else {
4054 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4055 }
4056 }
4057 chunks
4058}
4059
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Go-to-definition into a file outside the opened worktree: the target
    // file should appear as an invisible worktree while the definition is
    // held, and disappear once the definition is dropped.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is opened as a (visible) worktree root.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server resolves the symbol at offset 22 of b.rs to a location
    // inside a.rs (a file the project has not opened).
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, a.rs is listed as an additional,
        // non-visible worktree alongside the visible b.rs.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // After dropping the definition, only the visible b.rs worktree remains.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Returns each worktree's root path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4160
4161#[gpui::test]
4162async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
4163 init_test(cx);
4164
4165 let fs = FakeFs::new(cx.executor());
4166 fs.insert_tree(
4167 path!("/dir"),
4168 json!({
4169 "a.ts": "",
4170 }),
4171 )
4172 .await;
4173
4174 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4175
4176 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4177 language_registry.add(typescript_lang());
4178 let mut fake_language_servers = language_registry.register_fake_lsp(
4179 "TypeScript",
4180 FakeLspAdapter {
4181 capabilities: lsp::ServerCapabilities {
4182 completion_provider: Some(lsp::CompletionOptions {
4183 trigger_characters: Some(vec![".".to_string()]),
4184 ..Default::default()
4185 }),
4186 ..Default::default()
4187 },
4188 ..Default::default()
4189 },
4190 );
4191
4192 let (buffer, _handle) = project
4193 .update(cx, |p, cx| {
4194 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
4195 })
4196 .await
4197 .unwrap();
4198
4199 let fake_server = fake_language_servers.next().await.unwrap();
4200 cx.executor().run_until_parked();
4201
4202 // When text_edit exists, it takes precedence over insert_text and label
4203 let text = "let a = obj.fqn";
4204 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
4205 let completions = project.update(cx, |project, cx| {
4206 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
4207 });
4208
4209 fake_server
4210 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
4211 Ok(Some(lsp::CompletionResponse::Array(vec![
4212 lsp::CompletionItem {
4213 label: "labelText".into(),
4214 insert_text: Some("insertText".into()),
4215 text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
4216 range: lsp::Range::new(
4217 lsp::Position::new(0, text.len() as u32 - 3),
4218 lsp::Position::new(0, text.len() as u32),
4219 ),
4220 new_text: "textEditText".into(),
4221 })),
4222 ..Default::default()
4223 },
4224 ])))
4225 })
4226 .next()
4227 .await;
4228
4229 let completions = completions
4230 .await
4231 .unwrap()
4232 .into_iter()
4233 .flat_map(|response| response.completions)
4234 .collect::<Vec<_>>();
4235 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
4236
4237 assert_eq!(completions.len(), 1);
4238 assert_eq!(completions[0].new_text, "textEditText");
4239 assert_eq!(
4240 completions[0].replace_range.to_offset(&snapshot),
4241 text.len() - 3..text.len()
4242 );
4243}
4244
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Exercises completion items that rely on the list-level
    // `itemDefaults.edit_range` instead of a per-item `text_edit`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // The default range covers the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // text_edit_text is inserted over the defaulted edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a defaulted edit range present, the label (not insert_text)
        // is used as the replacement text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4382
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Exercises completions where neither a per-item text_edit nor a
    // list-level default edit range is provided, so the replacement range
    // must be derived from the text around the cursor.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // insert_text is used, replacing the "fqn" word before the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Complete just before the closing quote, i.e. after "cmp".
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is used, replacing the "cmp" word before the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4489
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    // Completion text received from the language server may contain `\r` or
    // `\r\n` line endings; the editor must normalize them to `\n` before
    // inserting into the buffer.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the request first; the handler below answers it and
    // `.next().await` waits for that exchange to complete.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Deliberately mix a bare `\r` and a `\r\n` ending.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both `\r` and `\r\n` are normalized to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4558
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // Exercises the command-based code-action path: the server's resolved
    // action contains a command (not edits), so applying it must execute the
    // command, and the edits then arrive via a server-initiated
    // `workspace/applyEdit` request, which must end up in the returned
    // project transaction.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // Advertise resolve support so the action is resolved lazily.
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    // The `data` field is what the resolve handler below keys
                    // off of to attach the command.
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action ("The code action").
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-to-client request: apply an edit that inserts
                    // "X" at the start of the file.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction's edit is undoable like any local edit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
4702
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    // Renaming an entry to a path whose parent directories do not exist yet
    // must create the whole directory hierarchy, preserve the file's
    // contents, and also work when moving back into an existing directory.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move into a deeply-nested path; none of dir1/dir2/dir3 exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Re-fetch the entry id, since the file now lives at the new path.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Now move the file up into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
4810
4811#[gpui::test(iterations = 10)]
4812async fn test_save_file(cx: &mut gpui::TestAppContext) {
4813 init_test(cx);
4814
4815 let fs = FakeFs::new(cx.executor());
4816 fs.insert_tree(
4817 path!("/dir"),
4818 json!({
4819 "file1": "the old contents",
4820 }),
4821 )
4822 .await;
4823
4824 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4825 let buffer = project
4826 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4827 .await
4828 .unwrap();
4829 buffer.update(cx, |buffer, cx| {
4830 assert_eq!(buffer.text(), "the old contents");
4831 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4832 });
4833
4834 project
4835 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4836 .await
4837 .unwrap();
4838
4839 let new_text = fs
4840 .load(Path::new(path!("/dir/file1")))
4841 .await
4842 .unwrap()
4843 .replace("\r\n", "\n");
4844 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4845}
4846
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // A buffer created without a path has no language; saving it to a path
    // with a recognized extension must assign the language and start the
    // matching language server, which then gets a didOpen for the buffer.
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; it cannot have a language server yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving with a `.rs` extension should trigger the Rust server.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the started server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4926
4927#[gpui::test(iterations = 30)]
4928async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4929 init_test(cx);
4930
4931 let fs = FakeFs::new(cx.executor());
4932 fs.insert_tree(
4933 path!("/dir"),
4934 json!({
4935 "file1": "the original contents",
4936 }),
4937 )
4938 .await;
4939
4940 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4941 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4942 let buffer = project
4943 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4944 .await
4945 .unwrap();
4946
4947 // Change the buffer's file on disk, and then wait for the file change
4948 // to be detected by the worktree, so that the buffer starts reloading.
4949 fs.save(
4950 path!("/dir/file1").as_ref(),
4951 &"the first contents".into(),
4952 Default::default(),
4953 )
4954 .await
4955 .unwrap();
4956 worktree.next_event(cx).await;
4957
4958 // Change the buffer's file again. Depending on the random seed, the
4959 // previous file change may still be in progress.
4960 fs.save(
4961 path!("/dir/file1").as_ref(),
4962 &"the second contents".into(),
4963 Default::default(),
4964 )
4965 .await
4966 .unwrap();
4967 worktree.next_event(cx).await;
4968
4969 cx.executor().run_until_parked();
4970 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4971 buffer.read_with(cx, |buffer, _| {
4972 assert_eq!(buffer.text(), on_disk_text);
4973 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4974 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4975 });
4976}
4977
4978#[gpui::test(iterations = 30)]
4979async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4980 init_test(cx);
4981
4982 let fs = FakeFs::new(cx.executor());
4983 fs.insert_tree(
4984 path!("/dir"),
4985 json!({
4986 "file1": "the original contents",
4987 }),
4988 )
4989 .await;
4990
4991 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4992 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4993 let buffer = project
4994 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4995 .await
4996 .unwrap();
4997
4998 // Change the buffer's file on disk, and then wait for the file change
4999 // to be detected by the worktree, so that the buffer starts reloading.
5000 fs.save(
5001 path!("/dir/file1").as_ref(),
5002 &"the first contents".into(),
5003 Default::default(),
5004 )
5005 .await
5006 .unwrap();
5007 worktree.next_event(cx).await;
5008
5009 cx.executor()
5010 .spawn(cx.executor().simulate_random_delay())
5011 .await;
5012
5013 // Perform a noop edit, causing the buffer's version to increase.
5014 buffer.update(cx, |buffer, cx| {
5015 buffer.edit([(0..0, " ")], None, cx);
5016 buffer.undo(cx);
5017 });
5018
5019 cx.executor().run_until_parked();
5020 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5021 buffer.read_with(cx, |buffer, _| {
5022 let buffer_text = buffer.text();
5023 if buffer_text == on_disk_text {
5024 assert!(
5025 !buffer.is_dirty() && !buffer.has_conflict(),
5026 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5027 );
5028 }
5029 // If the file change occurred while the buffer was processing the first
5030 // change, the buffer will be in a conflicting state.
5031 else {
5032 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5033 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5034 }
5035 });
5036}
5037
5038#[gpui::test]
5039async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5040 init_test(cx);
5041
5042 let fs = FakeFs::new(cx.executor());
5043 fs.insert_tree(
5044 path!("/dir"),
5045 json!({
5046 "file1": "the old contents",
5047 }),
5048 )
5049 .await;
5050
5051 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5052 let buffer = project
5053 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5054 .await
5055 .unwrap();
5056 buffer.update(cx, |buffer, cx| {
5057 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5058 });
5059
5060 project
5061 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5062 .await
5063 .unwrap();
5064
5065 let new_text = fs
5066 .load(Path::new(path!("/dir/file1")))
5067 .await
5068 .unwrap()
5069 .replace("\r\n", "\n");
5070 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5071}
5072
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    // Saving an untitled buffer to a path must write its contents, clear the
    // dirty flag, assign the language from the new extension, and register
    // the buffer under that path so reopening returns the same entity.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // Untitled buffers start out as plain text.
        assert_eq!(buffer.language().unwrap().name(), "Plain Text");
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The `.rs` extension causes the Rust language to be assigned.
        assert_eq!(buffer.language().unwrap().name(), "Rust");
    });

    // Opening the saved path must return the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
5126
#[gpui::test]
async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
    // Saving an already-file-backed buffer under a new path must re-point the
    // buffer at the new file, while the original file on disk stays intact.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    fs.insert_tree(
        path!("/dir"),
        json!({
            "data_a.txt": "data about a"
        }),
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    // Change the trailing "a" to "b" so the buffer differs from disk.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(11..12, "b")], None, cx);
    });

    // Save buffer's contents as a new file and confirm that the buffer's now
    // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
    // file associated with the buffer has now been updated to `data_b.txt`
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let new_path = ProjectPath {
                worktree_id,
                path: rel_path("data_b.txt").into(),
            };

            project.save_buffer_as(buffer.clone(), new_path, cx)
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_b.txt")
        )
    });

    // Open the original `data_a.txt` file, confirming that its contents are
    // unchanged and the resulting buffer's associated file is `data_a.txt`.
    let original_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    original_buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "data about a");
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_a.txt")
        )
    });
}
5193
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Uses the real file system: renames and deletions on disk must be
    // picked up by the worktree rescan (preserving entry ids and re-pointing
    // open buffers), and the resulting update stream must bring a remote
    // replica of the worktree into a consistent state.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real-FS watching blocks; allow it on the test executor.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Capture entry ids before the renames so identity can be checked after.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, to replay on the remote.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the renames/deletion after the rescan.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive renames: the moved files keep their identities.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers are re-pointed at the new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        // ...except the deleted file's buffer, which keeps its old path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5361
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    // Renaming a directory must preserve the entry ids of the directory and
    // the files inside it, and must not mark open buffers dirty.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    // Capture ids before the rename so identity can be checked after.
    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the containing directory "a" -> "b".
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Both entries keep their ids at their new paths, and the open buffer
    // is still clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
5415
5416#[gpui::test]
5417async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5418 init_test(cx);
5419
5420 let fs = FakeFs::new(cx.executor());
5421 fs.insert_tree(
5422 "/dir",
5423 json!({
5424 "a.txt": "a-contents",
5425 "b.txt": "b-contents",
5426 }),
5427 )
5428 .await;
5429
5430 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5431
5432 // Spawn multiple tasks to open paths, repeating some paths.
5433 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5434 (
5435 p.open_local_buffer("/dir/a.txt", cx),
5436 p.open_local_buffer("/dir/b.txt", cx),
5437 p.open_local_buffer("/dir/a.txt", cx),
5438 )
5439 });
5440
5441 let buffer_a_1 = buffer_a_1.await.unwrap();
5442 let buffer_a_2 = buffer_a_2.await.unwrap();
5443 let buffer_b = buffer_b.await.unwrap();
5444 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5445 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5446
5447 // There is only one buffer per path.
5448 let buffer_a_id = buffer_a_1.entity_id();
5449 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5450
5451 // Open the same path again while it is still open.
5452 drop(buffer_a_1);
5453 let buffer_a_3 = project
5454 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5455 .await
5456 .unwrap();
5457
5458 // There's still only one buffer per path.
5459 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5460}
5461
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Exercises the buffer dirty-state lifecycle and the exact sequence of
    // events (Edited / DirtyChanged / Saved / FileHandleChanged) emitted as a
    // buffer is edited, saved, restored to its saved contents, and as its
    // backing file is deleted. Every subscription below filters out
    // `BufferEvent::Operation` so only state-change events are recorded.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by marking the current version as persisted with the
        // file's current mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Two consecutive edits only produce one DirtyChanged: the state only
        // transitioned once (clean -> dirty).
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    // Restoring the saved text emits Edited plus a DirtyChanged back to clean.
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    // Deleting the file does not clear the dirty flag of already-edited buffers.
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
5643
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how a buffer reacts to its file changing on disk:
    // - while clean, the buffer reloads by diffing old vs. new contents, so
    //   anchors survive the reload at their updated offsets;
    // - while dirty, the buffer keeps its in-memory contents and is flagged as
    //   having a conflict instead of being reloaded.
    init_test(cx);

    // `ˇ` markers record offsets whose anchors we will track across reloads.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Anchors created before the reload resolve to the corresponding
        // marked positions in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
5726
5727#[gpui::test]
5728async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
5729 init_test(cx);
5730
5731 let fs = FakeFs::new(cx.executor());
5732 fs.insert_tree(
5733 path!("/dir"),
5734 json!({
5735 "file1": "a\nb\nc\n",
5736 "file2": "one\r\ntwo\r\nthree\r\n",
5737 }),
5738 )
5739 .await;
5740
5741 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5742 let buffer1 = project
5743 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5744 .await
5745 .unwrap();
5746 let buffer2 = project
5747 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5748 .await
5749 .unwrap();
5750
5751 buffer1.update(cx, |buffer, _| {
5752 assert_eq!(buffer.text(), "a\nb\nc\n");
5753 assert_eq!(buffer.line_ending(), LineEnding::Unix);
5754 });
5755 buffer2.update(cx, |buffer, _| {
5756 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
5757 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5758 });
5759
5760 // Change a file's line endings on disk from unix to windows. The buffer's
5761 // state updates correctly.
5762 fs.save(
5763 path!("/dir/file1").as_ref(),
5764 &"aaa\nb\nc\n".into(),
5765 LineEnding::Windows,
5766 )
5767 .await
5768 .unwrap();
5769 cx.executor().run_until_parked();
5770 buffer1.update(cx, |buffer, _| {
5771 assert_eq!(buffer.text(), "aaa\nb\nc\n");
5772 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5773 });
5774
5775 // Save a file with windows line endings. The file is written correctly.
5776 buffer2.update(cx, |buffer, cx| {
5777 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
5778 });
5779 project
5780 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
5781 .await
5782 .unwrap();
5783 assert_eq!(
5784 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
5785 "one\r\ntwo\r\nthree\r\nfour\r\n",
5786 );
5787}
5788
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Publishes LSP diagnostics whose `related_information` entries link
    // primary errors to their supporting hints (and the hints back to the
    // "original diagnostic"), then verifies that they are grouped: members of
    // a group share a `group_id`, exactly one member per group is primary, and
    // `diagnostic_group` returns all members of a group in range order.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two logical groups: "error 1" (warning + one hint) and "error 2"
    // (error + two hints). Hints reference their primary via
    // related_information, mirroring how rust-analyzer reports them.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in the buffer, sorted by range: each carries the
    // group_id of its group and an is_primary flag.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 is the "error 2" cluster: both hints plus the primary error,
    // in range order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 is the "error 1" cluster: the primary warning and its hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6048
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the LSP file-operation protocol around renaming a worktree
    // entry: when the server registers willRename/didRename filters matching
    // the file, the project sends `workspace/willRenameFiles` before the
    // rename (and applies the returned WorkspaceEdit) and
    // `workspace/didRenameFiles` after it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the fake server registers: all *.rs files and all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename of one.rs -> three.rs; it won't complete until the
    // willRenameFiles request below is answered.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will return from willRenameFiles; the project is
    // expected to apply it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set once by the willRenameFiles handler so we can assert it ran.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles with
    // the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6185
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end LSP symbol rename: `prepare_rename` resolves the symbol range
    // at the cursor via textDocument/prepareRename, then `perform_rename`
    // applies the multi-file WorkspaceEdit returned by the fake server's
    // textDocument/rename handler, updating both buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Prepare a rename at offset 7 (inside "ONE"); the server responds with
    // the symbol's full range, 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server returns edits for the definition in
    // one.rs and both references in two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction touches both buffers; verify each one's final text.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6326
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Plain-text project search across worktree files, and verification that
    // open buffers are searched using their in-memory contents: after editing
    // four.rs in a buffer (without saving), the search results reflect the
    // unsaved edits.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive search: matches "TWO" in two.rs and three.rs only.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory so it now references two::TWO twice.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The unsaved buffer contents are searched, so four.rs now matches too.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
6403
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Exercises the inclusion `PathMatcher` argument of `SearchQuery::text`:
    // non-matching inclusions return nothing, a single glob limits results to
    // its files, and multiple globs union their matches (unmatched globs are
    // simply ignored).
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
6527
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Exercises the exclusion `PathMatcher` argument of `SearchQuery::text`:
    // non-matching exclusions leave results untouched, a single glob removes
    // its files, multiple globs remove the union of their matches, and
    // excluding everything yields an empty result.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
6651
6652#[gpui::test]
6653async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
6654 init_test(cx);
6655
6656 let search_query = "file";
6657
6658 let fs = FakeFs::new(cx.executor());
6659 fs.insert_tree(
6660 path!("/dir"),
6661 json!({
6662 "one.rs": r#"// Rust file one"#,
6663 "one.ts": r#"// TypeScript file one"#,
6664 "two.rs": r#"// Rust file two"#,
6665 "two.ts": r#"// TypeScript file two"#,
6666 }),
6667 )
6668 .await;
6669
6670 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6671 let path_style = PathStyle::local();
6672 let _buffer = project.update(cx, |project, cx| {
6673 project.create_local_buffer("file", None, false, cx)
6674 });
6675
6676 assert_eq!(
6677 search(
6678 &project,
6679 SearchQuery::text(
6680 search_query,
6681 false,
6682 true,
6683 false,
6684 Default::default(),
6685 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
6686 false,
6687 None,
6688 )
6689 .unwrap(),
6690 cx
6691 )
6692 .await
6693 .unwrap(),
6694 HashMap::from_iter([
6695 (path!("dir/one.rs").to_string(), vec![8..12]),
6696 (path!("dir/one.ts").to_string(), vec![14..18]),
6697 (path!("dir/two.rs").to_string(), vec![8..12]),
6698 (path!("dir/two.ts").to_string(), vec![14..18]),
6699 ]),
6700 "If no exclusions match, all files should be returned"
6701 );
6702
6703 assert_eq!(
6704 search(
6705 &project,
6706 SearchQuery::text(
6707 search_query,
6708 false,
6709 true,
6710 false,
6711 Default::default(),
6712 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
6713 false,
6714 None,
6715 )
6716 .unwrap(),
6717 cx
6718 )
6719 .await
6720 .unwrap(),
6721 HashMap::from_iter([
6722 (path!("dir/one.ts").to_string(), vec![14..18]),
6723 (path!("dir/two.ts").to_string(), vec![14..18]),
6724 ]),
6725 "Rust exclusion search should give only TypeScript files"
6726 );
6727
6728 assert_eq!(
6729 search(
6730 &project,
6731 SearchQuery::text(
6732 search_query,
6733 false,
6734 true,
6735 false,
6736 Default::default(),
6737 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6738 false,
6739 None,
6740 )
6741 .unwrap(),
6742 cx
6743 )
6744 .await
6745 .unwrap(),
6746 HashMap::from_iter([
6747 (path!("dir/one.rs").to_string(), vec![8..12]),
6748 (path!("dir/two.rs").to_string(), vec![8..12]),
6749 ]),
6750 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6751 );
6752
6753 assert!(
6754 search(
6755 &project,
6756 SearchQuery::text(
6757 search_query,
6758 false,
6759 true,
6760 false,
6761 Default::default(),
6762 PathMatcher::new(
6763 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6764 PathStyle::local(),
6765 )
6766 .unwrap(),
6767 false,
6768 None,
6769 )
6770 .unwrap(),
6771 cx
6772 )
6773 .await
6774 .unwrap()
6775 .is_empty(),
6776 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6777 );
6778}
6779
6780#[gpui::test]
6781async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6782 init_test(cx);
6783
6784 let search_query = "file";
6785
6786 let fs = FakeFs::new(cx.executor());
6787 fs.insert_tree(
6788 path!("/dir"),
6789 json!({
6790 "one.rs": r#"// Rust file one"#,
6791 "one.ts": r#"// TypeScript file one"#,
6792 "two.rs": r#"// Rust file two"#,
6793 "two.ts": r#"// TypeScript file two"#,
6794 }),
6795 )
6796 .await;
6797 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6798 assert!(
6799 search(
6800 &project,
6801 SearchQuery::text(
6802 search_query,
6803 false,
6804 true,
6805 false,
6806 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6807 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6808 false,
6809 None,
6810 )
6811 .unwrap(),
6812 cx
6813 )
6814 .await
6815 .unwrap()
6816 .is_empty(),
6817 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6818 );
6819
6820 assert!(
6821 search(
6822 &project,
6823 SearchQuery::text(
6824 search_query,
6825 false,
6826 true,
6827 false,
6828 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6829 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6830 false,
6831 None,
6832 )
6833 .unwrap(),
6834 cx
6835 )
6836 .await
6837 .unwrap()
6838 .is_empty(),
6839 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6840 );
6841
6842 assert!(
6843 search(
6844 &project,
6845 SearchQuery::text(
6846 search_query,
6847 false,
6848 true,
6849 false,
6850 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6851 .unwrap(),
6852 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6853 .unwrap(),
6854 false,
6855 None,
6856 )
6857 .unwrap(),
6858 cx
6859 )
6860 .await
6861 .unwrap()
6862 .is_empty(),
6863 "Non-matching inclusions and exclusions should not change that."
6864 );
6865
6866 assert_eq!(
6867 search(
6868 &project,
6869 SearchQuery::text(
6870 search_query,
6871 false,
6872 true,
6873 false,
6874 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6875 .unwrap(),
6876 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6877 .unwrap(),
6878 false,
6879 None,
6880 )
6881 .unwrap(),
6882 cx
6883 )
6884 .await
6885 .unwrap(),
6886 HashMap::from_iter([
6887 (path!("dir/one.ts").to_string(), vec![14..18]),
6888 (path!("dir/two.ts").to_string(), vec![14..18]),
6889 ]),
6890 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6891 );
6892}
6893
6894#[gpui::test]
6895async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
6896 init_test(cx);
6897
6898 let fs = FakeFs::new(cx.executor());
6899 fs.insert_tree(
6900 path!("/worktree-a"),
6901 json!({
6902 "haystack.rs": r#"// NEEDLE"#,
6903 "haystack.ts": r#"// NEEDLE"#,
6904 }),
6905 )
6906 .await;
6907 fs.insert_tree(
6908 path!("/worktree-b"),
6909 json!({
6910 "haystack.rs": r#"// NEEDLE"#,
6911 "haystack.ts": r#"// NEEDLE"#,
6912 }),
6913 )
6914 .await;
6915
6916 let path_style = PathStyle::local();
6917 let project = Project::test(
6918 fs.clone(),
6919 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
6920 cx,
6921 )
6922 .await;
6923
6924 assert_eq!(
6925 search(
6926 &project,
6927 SearchQuery::text(
6928 "NEEDLE",
6929 false,
6930 true,
6931 false,
6932 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
6933 Default::default(),
6934 true,
6935 None,
6936 )
6937 .unwrap(),
6938 cx
6939 )
6940 .await
6941 .unwrap(),
6942 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
6943 "should only return results from included worktree"
6944 );
6945 assert_eq!(
6946 search(
6947 &project,
6948 SearchQuery::text(
6949 "NEEDLE",
6950 false,
6951 true,
6952 false,
6953 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
6954 Default::default(),
6955 true,
6956 None,
6957 )
6958 .unwrap(),
6959 cx
6960 )
6961 .await
6962 .unwrap(),
6963 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
6964 "should only return results from included worktree"
6965 );
6966
6967 assert_eq!(
6968 search(
6969 &project,
6970 SearchQuery::text(
6971 "NEEDLE",
6972 false,
6973 true,
6974 false,
6975 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
6976 Default::default(),
6977 false,
6978 None,
6979 )
6980 .unwrap(),
6981 cx
6982 )
6983 .await
6984 .unwrap(),
6985 HashMap::from_iter([
6986 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
6987 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
6988 ]),
6989 "should return results from both worktrees"
6990 );
6991}
6992
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    // A repository whose `.gitignore` hides every `target` directory and the
    // top-level `node_modules` directory; only the root `package.json` is
    // visible to a default search.
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // With the 4th boolean false, matches inside gitignored directories must
    // not appear in the results (flipping it below adds them).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is created before each query below,
    // presumably to avoid state carried over from the previous search —
    // confirm whether reusing a single project would also work.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // Same query with the 4th boolean true: every match is reported,
    // including those inside `target/` and `node_modules/`.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusions may point into an ignored directory; exclusions still apply
    // on top of what the inclusions select.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
7117
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive search for a Cyrillic word stays a plain text query
    // (asserted below). The expected ranges are byte offsets: "привет" is
    // six 2-byte UTF-8 characters, i.e. 12 bytes.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A case-insensitive query containing non-ASCII characters is lowered to
    // a regex query (asserted below), and matches both "ПРИВЕТ" and "привет".
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing "." is matched literally — only "ПРИВЕТ." in two.rs
    // matches (12 bytes for the word plus one for the dot), not the "?" or
    // "!" that follow the word in one.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7200
7201#[gpui::test]
7202async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7203 init_test(cx);
7204
7205 let fs = FakeFs::new(cx.executor());
7206 fs.insert_tree(
7207 "/one/two",
7208 json!({
7209 "three": {
7210 "a.txt": "",
7211 "four": {}
7212 },
7213 "c.rs": ""
7214 }),
7215 )
7216 .await;
7217
7218 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7219 project
7220 .update(cx, |project, cx| {
7221 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7222 project.create_entry((id, rel_path("b..")), true, cx)
7223 })
7224 .await
7225 .unwrap()
7226 .into_included()
7227 .unwrap();
7228
7229 assert_eq!(
7230 fs.paths(true),
7231 vec![
7232 PathBuf::from(path!("/")),
7233 PathBuf::from(path!("/one")),
7234 PathBuf::from(path!("/one/two")),
7235 PathBuf::from(path!("/one/two/c.rs")),
7236 PathBuf::from(path!("/one/two/three")),
7237 PathBuf::from(path!("/one/two/three/a.txt")),
7238 PathBuf::from(path!("/one/two/three/b..")),
7239 PathBuf::from(path!("/one/two/three/four")),
7240 ]
7241 );
7242}
7243
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four servers registered for the same language: three advertise hover
    // support and one does not (and must never receive a hover request).
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler per server, remembering the handler streams so
    // we can later await that every hover-capable server was actually queried.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two reply with a real hover, labelled with the server
            // name so the final assertion can tell them apart.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // ESLint is hover-capable but responds with no hover content.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // No hover capability: the handler panics if it is ever invoked.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue a single hover and verify each capable server got a request.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Empty responses (ESLint's `None`) are dropped; only the two real
    // hovers remain, compared in sorted order.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
7398
7399#[gpui::test]
7400async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
7401 init_test(cx);
7402
7403 let fs = FakeFs::new(cx.executor());
7404 fs.insert_tree(
7405 path!("/dir"),
7406 json!({
7407 "a.ts": "a",
7408 }),
7409 )
7410 .await;
7411
7412 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7413
7414 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7415 language_registry.add(typescript_lang());
7416 let mut fake_language_servers = language_registry.register_fake_lsp(
7417 "TypeScript",
7418 FakeLspAdapter {
7419 capabilities: lsp::ServerCapabilities {
7420 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7421 ..lsp::ServerCapabilities::default()
7422 },
7423 ..FakeLspAdapter::default()
7424 },
7425 );
7426
7427 let (buffer, _handle) = project
7428 .update(cx, |p, cx| {
7429 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7430 })
7431 .await
7432 .unwrap();
7433 cx.executor().run_until_parked();
7434
7435 let fake_server = fake_language_servers
7436 .next()
7437 .await
7438 .expect("failed to get the language server");
7439
7440 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7441 move |_, _| async move {
7442 Ok(Some(lsp::Hover {
7443 contents: lsp::HoverContents::Array(vec![
7444 lsp::MarkedString::String("".to_string()),
7445 lsp::MarkedString::String(" ".to_string()),
7446 lsp::MarkedString::String("\n\n\n".to_string()),
7447 ]),
7448 range: None,
7449 }))
7450 },
7451 );
7452
7453 let hover_task = project.update(cx, |project, cx| {
7454 project.hover(&buffer, Point::new(0, 0), cx)
7455 });
7456 let () = request_handled
7457 .next()
7458 .await
7459 .expect("All hover requests should have been triggered");
7460 assert_eq!(
7461 Vec::<String>::new(),
7462 hover_task
7463 .await
7464 .into_iter()
7465 .flatten()
7466 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7467 .sorted()
7468 .collect::<Vec<_>>(),
7469 "Empty hover parts should be ignored"
7470 );
7471}
7472
7473#[gpui::test]
7474async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
7475 init_test(cx);
7476
7477 let fs = FakeFs::new(cx.executor());
7478 fs.insert_tree(
7479 path!("/dir"),
7480 json!({
7481 "a.ts": "a",
7482 }),
7483 )
7484 .await;
7485
7486 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7487
7488 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7489 language_registry.add(typescript_lang());
7490 let mut fake_language_servers = language_registry.register_fake_lsp(
7491 "TypeScript",
7492 FakeLspAdapter {
7493 capabilities: lsp::ServerCapabilities {
7494 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7495 ..lsp::ServerCapabilities::default()
7496 },
7497 ..FakeLspAdapter::default()
7498 },
7499 );
7500
7501 let (buffer, _handle) = project
7502 .update(cx, |p, cx| {
7503 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7504 })
7505 .await
7506 .unwrap();
7507 cx.executor().run_until_parked();
7508
7509 let fake_server = fake_language_servers
7510 .next()
7511 .await
7512 .expect("failed to get the language server");
7513
7514 let mut request_handled = fake_server
7515 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
7516 Ok(Some(vec![
7517 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7518 title: "organize imports".to_string(),
7519 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
7520 ..lsp::CodeAction::default()
7521 }),
7522 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7523 title: "fix code".to_string(),
7524 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
7525 ..lsp::CodeAction::default()
7526 }),
7527 ]))
7528 });
7529
7530 let code_actions_task = project.update(cx, |project, cx| {
7531 project.code_actions(
7532 &buffer,
7533 0..buffer.read(cx).len(),
7534 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
7535 cx,
7536 )
7537 });
7538
7539 let () = request_handled
7540 .next()
7541 .await
7542 .expect("The code action request should have been triggered");
7543
7544 let code_actions = code_actions_task.await.unwrap().unwrap();
7545 assert_eq!(code_actions.len(), 1);
7546 assert_eq!(
7547 code_actions[0].lsp_action.action_kind(),
7548 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
7549 );
7550}
7551
7552#[gpui::test]
7553async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
7554 init_test(cx);
7555
7556 let fs = FakeFs::new(cx.executor());
7557 fs.insert_tree(
7558 path!("/dir"),
7559 json!({
7560 "a.tsx": "a",
7561 }),
7562 )
7563 .await;
7564
7565 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7566
7567 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7568 language_registry.add(tsx_lang());
7569 let language_server_names = [
7570 "TypeScriptServer",
7571 "TailwindServer",
7572 "ESLintServer",
7573 "NoActionsCapabilitiesServer",
7574 ];
7575
7576 let mut language_server_rxs = [
7577 language_registry.register_fake_lsp(
7578 "tsx",
7579 FakeLspAdapter {
7580 name: language_server_names[0],
7581 capabilities: lsp::ServerCapabilities {
7582 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7583 ..lsp::ServerCapabilities::default()
7584 },
7585 ..FakeLspAdapter::default()
7586 },
7587 ),
7588 language_registry.register_fake_lsp(
7589 "tsx",
7590 FakeLspAdapter {
7591 name: language_server_names[1],
7592 capabilities: lsp::ServerCapabilities {
7593 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7594 ..lsp::ServerCapabilities::default()
7595 },
7596 ..FakeLspAdapter::default()
7597 },
7598 ),
7599 language_registry.register_fake_lsp(
7600 "tsx",
7601 FakeLspAdapter {
7602 name: language_server_names[2],
7603 capabilities: lsp::ServerCapabilities {
7604 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7605 ..lsp::ServerCapabilities::default()
7606 },
7607 ..FakeLspAdapter::default()
7608 },
7609 ),
7610 language_registry.register_fake_lsp(
7611 "tsx",
7612 FakeLspAdapter {
7613 name: language_server_names[3],
7614 capabilities: lsp::ServerCapabilities {
7615 code_action_provider: None,
7616 ..lsp::ServerCapabilities::default()
7617 },
7618 ..FakeLspAdapter::default()
7619 },
7620 ),
7621 ];
7622
7623 let (buffer, _handle) = project
7624 .update(cx, |p, cx| {
7625 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7626 })
7627 .await
7628 .unwrap();
7629 cx.executor().run_until_parked();
7630
7631 let mut servers_with_actions_requests = HashMap::default();
7632 for i in 0..language_server_names.len() {
7633 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
7634 panic!(
7635 "Failed to get language server #{i} with name {}",
7636 &language_server_names[i]
7637 )
7638 });
7639 let new_server_name = new_server.server.name();
7640
7641 assert!(
7642 !servers_with_actions_requests.contains_key(&new_server_name),
7643 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7644 );
7645 match new_server_name.0.as_ref() {
7646 "TailwindServer" | "TypeScriptServer" => {
7647 servers_with_actions_requests.insert(
7648 new_server_name.clone(),
7649 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7650 move |_, _| {
7651 let name = new_server_name.clone();
7652 async move {
7653 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
7654 lsp::CodeAction {
7655 title: format!("{name} code action"),
7656 ..lsp::CodeAction::default()
7657 },
7658 )]))
7659 }
7660 },
7661 ),
7662 );
7663 }
7664 "ESLintServer" => {
7665 servers_with_actions_requests.insert(
7666 new_server_name,
7667 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7668 |_, _| async move { Ok(None) },
7669 ),
7670 );
7671 }
7672 "NoActionsCapabilitiesServer" => {
7673 let _never_handled = new_server
7674 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7675 panic!(
7676 "Should not call for code actions server with no corresponding capabilities"
7677 )
7678 });
7679 }
7680 unexpected => panic!("Unexpected server name: {unexpected}"),
7681 }
7682 }
7683
7684 let code_actions_task = project.update(cx, |project, cx| {
7685 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
7686 });
7687
7688 // cx.run_until_parked();
7689 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
7690 |mut code_actions_request| async move {
7691 code_actions_request
7692 .next()
7693 .await
7694 .expect("All code actions requests should have been triggered")
7695 },
7696 ))
7697 .await;
7698 assert_eq!(
7699 vec!["TailwindServer code action", "TypeScriptServer code action"],
7700 code_actions_task
7701 .await
7702 .unwrap()
7703 .unwrap()
7704 .into_iter()
7705 .map(|code_action| code_action.lsp_action.title().to_owned())
7706 .sorted()
7707 .collect::<Vec<_>>(),
7708 "Should receive code actions responses from all related servers with hover capabilities"
7709 );
7710}
7711
7712#[gpui::test]
7713async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
7714 init_test(cx);
7715
7716 let fs = FakeFs::new(cx.executor());
7717 fs.insert_tree(
7718 "/dir",
7719 json!({
7720 "a.rs": "let a = 1;",
7721 "b.rs": "let b = 2;",
7722 "c.rs": "let c = 2;",
7723 }),
7724 )
7725 .await;
7726
7727 let project = Project::test(
7728 fs,
7729 [
7730 "/dir/a.rs".as_ref(),
7731 "/dir/b.rs".as_ref(),
7732 "/dir/c.rs".as_ref(),
7733 ],
7734 cx,
7735 )
7736 .await;
7737
7738 // check the initial state and get the worktrees
7739 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7740 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7741 assert_eq!(worktrees.len(), 3);
7742
7743 let worktree_a = worktrees[0].read(cx);
7744 let worktree_b = worktrees[1].read(cx);
7745 let worktree_c = worktrees[2].read(cx);
7746
7747 // check they start in the right order
7748 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7749 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7750 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7751
7752 (
7753 worktrees[0].clone(),
7754 worktrees[1].clone(),
7755 worktrees[2].clone(),
7756 )
7757 });
7758
7759 // move first worktree to after the second
7760 // [a, b, c] -> [b, a, c]
7761 project
7762 .update(cx, |project, cx| {
7763 let first = worktree_a.read(cx);
7764 let second = worktree_b.read(cx);
7765 project.move_worktree(first.id(), second.id(), cx)
7766 })
7767 .expect("moving first after second");
7768
7769 // check the state after moving
7770 project.update(cx, |project, cx| {
7771 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7772 assert_eq!(worktrees.len(), 3);
7773
7774 let first = worktrees[0].read(cx);
7775 let second = worktrees[1].read(cx);
7776 let third = worktrees[2].read(cx);
7777
7778 // check they are now in the right order
7779 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7780 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7781 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7782 });
7783
7784 // move the second worktree to before the first
7785 // [b, a, c] -> [a, b, c]
7786 project
7787 .update(cx, |project, cx| {
7788 let second = worktree_a.read(cx);
7789 let first = worktree_b.read(cx);
7790 project.move_worktree(first.id(), second.id(), cx)
7791 })
7792 .expect("moving second before first");
7793
7794 // check the state after moving
7795 project.update(cx, |project, cx| {
7796 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7797 assert_eq!(worktrees.len(), 3);
7798
7799 let first = worktrees[0].read(cx);
7800 let second = worktrees[1].read(cx);
7801 let third = worktrees[2].read(cx);
7802
7803 // check they are now in the right order
7804 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7805 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7806 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7807 });
7808
7809 // move the second worktree to after the third
7810 // [a, b, c] -> [a, c, b]
7811 project
7812 .update(cx, |project, cx| {
7813 let second = worktree_b.read(cx);
7814 let third = worktree_c.read(cx);
7815 project.move_worktree(second.id(), third.id(), cx)
7816 })
7817 .expect("moving second after third");
7818
7819 // check the state after moving
7820 project.update(cx, |project, cx| {
7821 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7822 assert_eq!(worktrees.len(), 3);
7823
7824 let first = worktrees[0].read(cx);
7825 let second = worktrees[1].read(cx);
7826 let third = worktrees[2].read(cx);
7827
7828 // check they are now in the right order
7829 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7830 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7831 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7832 });
7833
7834 // move the third worktree to before the second
7835 // [a, c, b] -> [a, b, c]
7836 project
7837 .update(cx, |project, cx| {
7838 let third = worktree_c.read(cx);
7839 let second = worktree_b.read(cx);
7840 project.move_worktree(third.id(), second.id(), cx)
7841 })
7842 .expect("moving third before second");
7843
7844 // check the state after moving
7845 project.update(cx, |project, cx| {
7846 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7847 assert_eq!(worktrees.len(), 3);
7848
7849 let first = worktrees[0].read(cx);
7850 let second = worktrees[1].read(cx);
7851 let third = worktrees[2].read(cx);
7852
7853 // check they are now in the right order
7854 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7855 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7856 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7857 });
7858
7859 // move the first worktree to after the third
7860 // [a, b, c] -> [b, c, a]
7861 project
7862 .update(cx, |project, cx| {
7863 let first = worktree_a.read(cx);
7864 let third = worktree_c.read(cx);
7865 project.move_worktree(first.id(), third.id(), cx)
7866 })
7867 .expect("moving first after third");
7868
7869 // check the state after moving
7870 project.update(cx, |project, cx| {
7871 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7872 assert_eq!(worktrees.len(), 3);
7873
7874 let first = worktrees[0].read(cx);
7875 let second = worktrees[1].read(cx);
7876 let third = worktrees[2].read(cx);
7877
7878 // check they are now in the right order
7879 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7880 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7881 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7882 });
7883
7884 // move the third worktree to before the first
7885 // [b, c, a] -> [a, b, c]
7886 project
7887 .update(cx, |project, cx| {
7888 let third = worktree_a.read(cx);
7889 let first = worktree_b.read(cx);
7890 project.move_worktree(third.id(), first.id(), cx)
7891 })
7892 .expect("moving third before first");
7893
7894 // check the state after moving
7895 project.update(cx, |project, cx| {
7896 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7897 assert_eq!(worktrees.len(), 3);
7898
7899 let first = worktrees[0].read(cx);
7900 let second = worktrees[1].read(cx);
7901 let third = worktrees[2].read(cx);
7902
7903 // check they are now in the right order
7904 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7905 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7906 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7907 });
7908}
7909
// Verifies that an unstaged diff (working copy vs. git index) is computed for
// a buffer, and that it is recomputed when the index contents change on disk.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // File contents as staged in the git index.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // File contents in the working copy (one added line, one modified line).
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Initially, the diff against the index has one added and one modified hunk.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Change the index so that it already contains the comment line but not
    // the println; only the println should remain as an (added) hunk.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff
                .snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
8003
// Verifies that an uncommitted diff (working copy vs. HEAD) is computed for a
// buffer, tracks changes to HEAD, and handles files deleted from the working
// copy — both before and after the deletion is staged.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: HEAD, index, and working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index also contain `deletion.rs`, which is absent from the
    // working tree, i.e. an unstaged deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment is unstaged (has a secondary hunk); the println change
    // is staged (matches the index), so it has no secondary hunk.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a deleted hunk; the deletion is not yet staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file (remove it from the index entirely).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deleted hunk no longer has a secondary (unstaged) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8187
8188#[gpui::test]
8189async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
8190 use DiffHunkSecondaryStatus::*;
8191 init_test(cx);
8192
8193 let committed_contents = r#"
8194 zero
8195 one
8196 two
8197 three
8198 four
8199 five
8200 "#
8201 .unindent();
8202 let file_contents = r#"
8203 one
8204 TWO
8205 three
8206 FOUR
8207 five
8208 "#
8209 .unindent();
8210
8211 let fs = FakeFs::new(cx.background_executor.clone());
8212 fs.insert_tree(
8213 "/dir",
8214 json!({
8215 ".git": {},
8216 "file.txt": file_contents.clone()
8217 }),
8218 )
8219 .await;
8220
8221 fs.set_head_and_index_for_repo(
8222 path!("/dir/.git").as_ref(),
8223 &[("file.txt", committed_contents.clone())],
8224 );
8225
8226 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8227
8228 let buffer = project
8229 .update(cx, |project, cx| {
8230 project.open_local_buffer("/dir/file.txt", cx)
8231 })
8232 .await
8233 .unwrap();
8234 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8235 let uncommitted_diff = project
8236 .update(cx, |project, cx| {
8237 project.open_uncommitted_diff(buffer.clone(), cx)
8238 })
8239 .await
8240 .unwrap();
8241 let mut diff_events = cx.events(&uncommitted_diff);
8242
8243 // The hunks are initially unstaged.
8244 uncommitted_diff.read_with(cx, |diff, cx| {
8245 assert_hunks(
8246 diff.snapshot(cx).hunks(&snapshot),
8247 &snapshot,
8248 &diff.base_text_string(cx).unwrap(),
8249 &[
8250 (
8251 0..0,
8252 "zero\n",
8253 "",
8254 DiffHunkStatus::deleted(HasSecondaryHunk),
8255 ),
8256 (
8257 1..2,
8258 "two\n",
8259 "TWO\n",
8260 DiffHunkStatus::modified(HasSecondaryHunk),
8261 ),
8262 (
8263 3..4,
8264 "four\n",
8265 "FOUR\n",
8266 DiffHunkStatus::modified(HasSecondaryHunk),
8267 ),
8268 ],
8269 );
8270 });
8271
8272 // Stage a hunk. It appears as optimistically staged.
8273 uncommitted_diff.update(cx, |diff, cx| {
8274 let range =
8275 snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
8276 let hunks = diff
8277 .snapshot(cx)
8278 .hunks_intersecting_range(range, &snapshot)
8279 .collect::<Vec<_>>();
8280 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
8281
8282 assert_hunks(
8283 diff.snapshot(cx).hunks(&snapshot),
8284 &snapshot,
8285 &diff.base_text_string(cx).unwrap(),
8286 &[
8287 (
8288 0..0,
8289 "zero\n",
8290 "",
8291 DiffHunkStatus::deleted(HasSecondaryHunk),
8292 ),
8293 (
8294 1..2,
8295 "two\n",
8296 "TWO\n",
8297 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
8298 ),
8299 (
8300 3..4,
8301 "four\n",
8302 "FOUR\n",
8303 DiffHunkStatus::modified(HasSecondaryHunk),
8304 ),
8305 ],
8306 );
8307 });
8308
8309 // The diff emits a change event for the range of the staged hunk.
8310 assert!(matches!(
8311 diff_events.next().await.unwrap(),
8312 BufferDiffEvent::HunksStagedOrUnstaged(_)
8313 ));
8314 let event = diff_events.next().await.unwrap();
8315 if let BufferDiffEvent::DiffChanged(DiffChanged {
8316 changed_range: Some(changed_range),
8317 base_text_changed_range: _,
8318 extended_range: _,
8319 }) = event
8320 {
8321 let changed_range = changed_range.to_point(&snapshot);
8322 assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
8323 } else {
8324 panic!("Unexpected event {event:?}");
8325 }
8326
8327 // When the write to the index completes, it appears as staged.
8328 cx.run_until_parked();
8329 uncommitted_diff.update(cx, |diff, cx| {
8330 assert_hunks(
8331 diff.snapshot(cx).hunks(&snapshot),
8332 &snapshot,
8333 &diff.base_text_string(cx).unwrap(),
8334 &[
8335 (
8336 0..0,
8337 "zero\n",
8338 "",
8339 DiffHunkStatus::deleted(HasSecondaryHunk),
8340 ),
8341 (
8342 1..2,
8343 "two\n",
8344 "TWO\n",
8345 DiffHunkStatus::modified(NoSecondaryHunk),
8346 ),
8347 (
8348 3..4,
8349 "four\n",
8350 "FOUR\n",
8351 DiffHunkStatus::modified(HasSecondaryHunk),
8352 ),
8353 ],
8354 );
8355 });
8356
8357 // The diff emits a change event for the changed index text.
8358 let event = diff_events.next().await.unwrap();
8359 if let BufferDiffEvent::DiffChanged(DiffChanged {
8360 changed_range: Some(changed_range),
8361 base_text_changed_range: _,
8362 extended_range: _,
8363 }) = event
8364 {
8365 let changed_range = changed_range.to_point(&snapshot);
8366 assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
8367 } else {
8368 panic!("Unexpected event {event:?}");
8369 }
8370
8371 // Simulate a problem writing to the git index.
8372 fs.set_error_message_for_index_write(
8373 "/dir/.git".as_ref(),
8374 Some("failed to write git index".into()),
8375 );
8376
8377 // Stage another hunk.
8378 uncommitted_diff.update(cx, |diff, cx| {
8379 let range =
8380 snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
8381 let hunks = diff
8382 .snapshot(cx)
8383 .hunks_intersecting_range(range, &snapshot)
8384 .collect::<Vec<_>>();
8385 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
8386
8387 assert_hunks(
8388 diff.snapshot(cx).hunks(&snapshot),
8389 &snapshot,
8390 &diff.base_text_string(cx).unwrap(),
8391 &[
8392 (
8393 0..0,
8394 "zero\n",
8395 "",
8396 DiffHunkStatus::deleted(HasSecondaryHunk),
8397 ),
8398 (
8399 1..2,
8400 "two\n",
8401 "TWO\n",
8402 DiffHunkStatus::modified(NoSecondaryHunk),
8403 ),
8404 (
8405 3..4,
8406 "four\n",
8407 "FOUR\n",
8408 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
8409 ),
8410 ],
8411 );
8412 });
8413 assert!(matches!(
8414 diff_events.next().await.unwrap(),
8415 BufferDiffEvent::HunksStagedOrUnstaged(_)
8416 ));
8417 let event = diff_events.next().await.unwrap();
8418 if let BufferDiffEvent::DiffChanged(DiffChanged {
8419 changed_range: Some(changed_range),
8420 base_text_changed_range: _,
8421 extended_range: _,
8422 }) = event
8423 {
8424 let changed_range = changed_range.to_point(&snapshot);
8425 assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
8426 } else {
8427 panic!("Unexpected event {event:?}");
8428 }
8429
8430 // When the write fails, the hunk returns to being unstaged.
8431 cx.run_until_parked();
8432 uncommitted_diff.update(cx, |diff, cx| {
8433 assert_hunks(
8434 diff.snapshot(cx).hunks(&snapshot),
8435 &snapshot,
8436 &diff.base_text_string(cx).unwrap(),
8437 &[
8438 (
8439 0..0,
8440 "zero\n",
8441 "",
8442 DiffHunkStatus::deleted(HasSecondaryHunk),
8443 ),
8444 (
8445 1..2,
8446 "two\n",
8447 "TWO\n",
8448 DiffHunkStatus::modified(NoSecondaryHunk),
8449 ),
8450 (
8451 3..4,
8452 "four\n",
8453 "FOUR\n",
8454 DiffHunkStatus::modified(HasSecondaryHunk),
8455 ),
8456 ],
8457 );
8458 });
8459
8460 let event = diff_events.next().await.unwrap();
8461 if let BufferDiffEvent::DiffChanged(DiffChanged {
8462 changed_range: Some(changed_range),
8463 base_text_changed_range: _,
8464 extended_range: _,
8465 }) = event
8466 {
8467 let changed_range = changed_range.to_point(&snapshot);
8468 assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
8469 } else {
8470 panic!("Unexpected event {event:?}");
8471 }
8472
8473 // Allow writing to the git index to succeed again.
8474 fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);
8475
8476 // Stage two hunks with separate operations.
8477 uncommitted_diff.update(cx, |diff, cx| {
8478 let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
8479 diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
8480 diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
8481 });
8482
8483 // Both staged hunks appear as pending.
8484 uncommitted_diff.update(cx, |diff, cx| {
8485 assert_hunks(
8486 diff.snapshot(cx).hunks(&snapshot),
8487 &snapshot,
8488 &diff.base_text_string(cx).unwrap(),
8489 &[
8490 (
8491 0..0,
8492 "zero\n",
8493 "",
8494 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
8495 ),
8496 (
8497 1..2,
8498 "two\n",
8499 "TWO\n",
8500 DiffHunkStatus::modified(NoSecondaryHunk),
8501 ),
8502 (
8503 3..4,
8504 "four\n",
8505 "FOUR\n",
8506 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
8507 ),
8508 ],
8509 );
8510 });
8511
8512 // Both staging operations take effect.
8513 cx.run_until_parked();
8514 uncommitted_diff.update(cx, |diff, cx| {
8515 assert_hunks(
8516 diff.snapshot(cx).hunks(&snapshot),
8517 &snapshot,
8518 &diff.base_text_string(cx).unwrap(),
8519 &[
8520 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
8521 (
8522 1..2,
8523 "two\n",
8524 "TWO\n",
8525 DiffHunkStatus::modified(NoSecondaryHunk),
8526 ),
8527 (
8528 3..4,
8529 "four\n",
8530 "FOUR\n",
8531 DiffHunkStatus::modified(NoSecondaryHunk),
8532 ),
8533 ],
8534 );
8535 });
8536}
8537
// Verifies that staging operations remain consistent when filesystem events
// are delivered late: hunks staged while an earlier index-write event is
// still buffered must all end up staged once events are flushed.
// NOTE(review): paths here use raw "/dir" literals rather than `path!`; this
// presumably only runs correctly on unix-style paths — confirm for Windows.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD/index contents; the working copy deletes "zero" and rewrites two lines.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events so that index writes complete but their FS notifications
    // are buffered instead of delivered.
    fs.pause_events();

    // Stage the first hunk. It shows as pending (removal of secondary hunk).
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8731
// Randomized test: repeatedly stages/unstages random hunks with random delays
// between operations, tracking the expected secondary status locally, then
// checks that the diff's final hunk statuses match the model once everything
// settles. `OPERATIONS` env var overrides the number of operations.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every 5th line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of expected statuses.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk's staged state and record the expected pending status.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Randomly yield so operations interleave with background IO.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once all IO settles, pending statuses resolve to their final states.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8851
// Verifies that uncommitted diffs work when the project's worktree is a
// single file (rather than the repository's root directory).
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD contents vs. working-copy contents: one modified line.
    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs", committed_contents.clone())],
    );

    // Open the project rooted at the file itself, not at "/dir".
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The modification is unstaged (index matches HEAD), so the hunk has a
    // secondary hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &uncommitted_diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                DiffHunkStatus {
                    kind: DiffHunkStatusKind::Modified,
                    secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
                },
            )],
        );
    });
}
8925
// Verifies (against a real git repository and real filesystem) that staging a
// hunk does not clobber the file's executable bit in the index.
// TODO: Should we test this on Windows also?
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Real git subprocesses run below, so the executor must allow blocking.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit "foo" with mode 0755, then modify it in the working copy.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage all hunks in the file.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // `git diff --staged` must not report a mode change for the file.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // The index entry must still carry the executable mode (100755).
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
9010
// Verifies resolution of project paths to their containing git repository and
// repo-relative path, including nested repositories ("deps/dep1" inside
// "dir1"), files outside any repository, and the effect of removing a ".git"
// directory at runtime.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // Each pair is (project-relative path, expected (repo work dir,
        // repo-relative path)); `None` means "not inside any repository".
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // After deleting dir1's ".git", files under dir1 no longer resolve to a
    // repository (the nested dep repo doesn't contain them).
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
9100
// Verifies special handling of the home directory as a git repository: a
// repository rooted at $HOME is ignored for projects opened *inside* it, but
// is recognized when the project is opened at $HOME itself.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Project opened at ~/project: the repo at ~ must NOT claim its files.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Project opened at ~ itself: the home repo IS recognized.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
9158
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real git repository on disk, so blocking in tests is expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce a worktree-deleted entry (d.txt) and a worktree-modified
    // entry (a.txt) before the project ever sees the repo.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify the previously-unchanged tracked file; its status should be
    // picked up after the next scan.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modifications and drop d.txt from the index, clearing the
    // previously-reported statuses.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked (a.txt) and one untracked (b.txt) file.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
9288
#[gpui::test]
#[ignore] // NOTE(review): currently disabled; reason not recorded here — confirm before re-enabling.
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real git repository on disk, so blocking in tests is expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer repository (not the nested `sub` one).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
9352
9353#[track_caller]
9354/// We merge lhs into rhs.
9355fn merge_pending_ops_snapshots(
9356 source: Vec<pending_op::PendingOps>,
9357 mut target: Vec<pending_op::PendingOps>,
9358) -> Vec<pending_op::PendingOps> {
9359 for s_ops in source {
9360 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9361 if ops.repo_path == s_ops.repo_path {
9362 Some(idx)
9363 } else {
9364 None
9365 }
9366 }) {
9367 let t_ops = &mut target[idx];
9368 for s_op in s_ops.ops {
9369 if let Some(op_idx) = t_ops
9370 .ops
9371 .iter()
9372 .zip(0..)
9373 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9374 {
9375 let t_op = &mut t_ops.ops[op_idx];
9376 match (s_op.job_status, t_op.job_status) {
9377 (pending_op::JobStatus::Running, _) => {}
9378 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9379 (s_st, t_st) if s_st == t_st => {}
9380 _ => unreachable!(),
9381 }
9382 } else {
9383 t_ops.ops.push(s_op);
9384 }
9385 }
9386 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9387 } else {
9388 target.push(s_ops);
9389 }
9390 }
9391 target
9392}
9393
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot so we
    // can assert on the full op history at the end of the test.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Op ids are expected to be assigned sequentially starting at 1.
    let mut id = 1u16;

    // Stages (or unstages) `path` and asserts the pending op transitions
    // from Running (while the task is in flight) to Finished (once awaited).
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            // The op is registered synchronously and still running.
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        // After the task completes, the same op is marked Finished.
        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate staging and unstaging the same file five times.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event history records all five ops, in id order, finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The last operation was a stage, so the file ends up index-added.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9554
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot so we
    // can assert on the full op history at the end of the test.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Kick off a staging task and detach it without awaiting.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Immediately enqueue a second stage of the same path and await it.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // The first (superseded) op is recorded as Skipped; the second Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file should end up staged (index-added).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9660
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot so we
    // can assert on the full op history at the end of the test.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage one file explicitly, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: the explicit stage (id 1) finished; stage_all produced no new op
    // for it, and unstage_all recorded op id 2.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: staged by stage_all (id 1), then unstaged by unstage_all (id 2).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
9789
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the two files under the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open a deep subfolder of the repository as the worktree root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The repository root is discovered above the worktree root.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clear the simulated statuses and verify the update is observed.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
9869
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// `#[cfg(any())]` is never true, so this test is currently compiled out entirely.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create conflicting edits to a.txt on two branches, then cherry-pick
    // one onto the other to produce a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // The conflicted cherry-pick must leave CHERRY_PICK_HEAD behind.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository entity should report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick is resolved, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
9952
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index both contain the gitignore and the xml file; b.txt is
    // ignored by the initial "*.txt" rule.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // The ignored/added states should now be swapped: a.xml is ignored by
    // the new rule, and b.txt is staged (index-added).
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
10020
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit `a`, then modify it; `b` stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Sanity-check statuses before the rename.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the repository's work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The same repository entity should now point at the new work directory,
    // with statuses preserved.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10102
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of the Windows. See:
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so they start out untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // a.txt and b.txt are now committed and clean.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files/directories and extend the ignore rules.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new file inside a nested directory.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the untracked status should follow the
    // file to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10327
/// Verifies that filesystem churn inside a git-ignored directory (`target/`)
/// produces no spurious repository updates, and only minimal worktree-entry
/// events for the directory level that is already tracked in the worktree.
///
/// NOTE(review): `#[ignore]`d — runs against the real filesystem (`RealFs` +
/// `TempTree`) with parking allowed, so it is sensitive to platform FS-event
/// timing and ordering.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record every RepositoryUpdated event and every worktree entry change so
    // the assertions below can check exactly what each scan produced.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel path is an artifact of FS-event flushing;
                        // keep it out of the recorded events.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Explicitly load a file inside the ignored `target` dir so that it and
    // its ancestors appear as (ignored) worktree entries.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![
            RepositoryEvent::StatusesChanged,
            RepositoryEvent::MergeHeadsChanged,
        ],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Simulate build-tool churn inside the ignored directory: create a nested
    // dir, write a temp file into it, then remove the whole dir again.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree should look exactly as before the churn.
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
10489
10490// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
10491// to different timings/ordering of events.
10492#[ignore]
10493#[gpui::test]
10494async fn test_odd_events_for_ignored_dirs(
10495 executor: BackgroundExecutor,
10496 cx: &mut gpui::TestAppContext,
10497) {
10498 init_test(cx);
10499 let fs = FakeFs::new(executor);
10500 fs.insert_tree(
10501 path!("/root"),
10502 json!({
10503 ".git": {},
10504 ".gitignore": "**/target/",
10505 "src": {
10506 "main.rs": "fn main() {}",
10507 },
10508 "target": {
10509 "debug": {
10510 "foo.txt": "foo",
10511 "deps": {}
10512 }
10513 }
10514 }),
10515 )
10516 .await;
10517 fs.set_head_and_index_for_repo(
10518 path!("/root/.git").as_ref(),
10519 &[
10520 (".gitignore", "**/target/".into()),
10521 ("src/main.rs", "fn main() {}".into()),
10522 ],
10523 );
10524
10525 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10526 let repository_updates = Arc::new(Mutex::new(Vec::new()));
10527 let project_events = Arc::new(Mutex::new(Vec::new()));
10528 project.update(cx, |project, cx| {
10529 let repository_updates = repository_updates.clone();
10530 cx.subscribe(project.git_store(), move |_, _, e, _| {
10531 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
10532 repository_updates.lock().push(e.clone());
10533 }
10534 })
10535 .detach();
10536 let project_events = project_events.clone();
10537 cx.subscribe_self(move |_, e, _| {
10538 if let Event::WorktreeUpdatedEntries(_, updates) = e {
10539 project_events.lock().extend(
10540 updates
10541 .iter()
10542 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
10543 .filter(|(path, _)| path != "fs-event-sentinel"),
10544 );
10545 }
10546 })
10547 .detach();
10548 });
10549
10550 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10551 tree.update(cx, |tree, cx| {
10552 tree.load_file(rel_path("target/debug/foo.txt"), cx)
10553 })
10554 .await
10555 .unwrap();
10556 tree.flush_fs_events(cx).await;
10557 project
10558 .update(cx, |project, cx| project.git_scans_complete(cx))
10559 .await;
10560 cx.run_until_parked();
10561 tree.update(cx, |tree, _| {
10562 assert_eq!(
10563 tree.entries(true, 0)
10564 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10565 .collect::<Vec<_>>(),
10566 vec![
10567 (rel_path(""), false),
10568 (rel_path(".gitignore"), false),
10569 (rel_path("src"), false),
10570 (rel_path("src/main.rs"), false),
10571 (rel_path("target"), true),
10572 (rel_path("target/debug"), true),
10573 (rel_path("target/debug/deps"), true),
10574 (rel_path("target/debug/foo.txt"), true),
10575 ]
10576 );
10577 });
10578
10579 assert_eq!(
10580 repository_updates.lock().drain(..).collect::<Vec<_>>(),
10581 vec![
10582 RepositoryEvent::MergeHeadsChanged,
10583 RepositoryEvent::BranchChanged,
10584 RepositoryEvent::StatusesChanged,
10585 RepositoryEvent::StatusesChanged,
10586 ],
10587 "Initial worktree scan should produce a repo update event"
10588 );
10589 assert_eq!(
10590 project_events.lock().drain(..).collect::<Vec<_>>(),
10591 vec![
10592 ("target".to_string(), PathChange::Loaded),
10593 ("target/debug".to_string(), PathChange::Loaded),
10594 ("target/debug/deps".to_string(), PathChange::Loaded),
10595 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
10596 ],
10597 "All non-ignored entries and all opened firs should be getting a project event",
10598 );
10599
10600 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
10601 // This may happen multiple times during a single flycheck, but once is enough for testing.
10602 fs.emit_fs_event("/root/target/debug/deps", None);
10603 tree.flush_fs_events(cx).await;
10604 project
10605 .update(cx, |project, cx| project.git_scans_complete(cx))
10606 .await;
10607 cx.executor().run_until_parked();
10608
10609 assert_eq!(
10610 repository_updates
10611 .lock()
10612 .iter()
10613 .cloned()
10614 .collect::<Vec<_>>(),
10615 Vec::new(),
10616 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
10617 );
10618 assert_eq!(
10619 project_events.lock().as_slice(),
10620 Vec::new(),
10621 "No further project events should happen, as only ignored dirs received FS events",
10622 );
10623}
10624
/// Verifies that repositories are only reported for visible worktrees: adding
/// a single-file worktree (created with `visible == false`) that lives inside
/// an unrelated outer repository must not add that outer repository to the
/// project's repository list.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // `dir1` is a repository containing a nested repository `dep1`; only
    // `dep1` is opened as the project's visible worktree.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only the visible worktree's repository should be known.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add `b.txt` as an invisible worktree; it lives in `dir1`'s repository,
    // which must still not show up below.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
10686
/// Verifies git status and ignore state across rescans: files matched by a
/// `.gitignore` in an ancestor of the repository root and files inside an
/// ignored directory report no status, while a newly created tracked file
/// picks up an `Added` index status.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so that even ignored entries are scanned.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // The worktree root is `/root/tree`; the `.gitignore` at `/root` sits
    // outside the repository and ignores the `ancestor-ignored-*` files.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be scanned so they can be
    // asserted on below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Committed-and-clean, ancestor-ignored, and ignored-dir files all report
    // no status; only the last is flagged as ignored in the worktree.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new tracked file and stage it, plus new ancestor-ignored and
    // ignored-dir files that should stay status-less.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The `.git` directory itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10827
/// Verifies that linked git worktrees (a `.git` *file* containing a
/// `gitdir: ../.git/worktrees/...` pointer) and submodules (`gitdir:
/// ../../.git/modules/...`) are each discovered as separate repositories, and
/// that changes to their git state are picked up and reflected in statuses.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories — main, linked worktree, submodule — must be found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, not the
    // outer `/project` repository.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // HEAD/index say "b" while the file on disk says "B", so the file shows
    // as modified in the working tree.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
10983
/// Verifies that two worktrees rooted in sibling subdirectories of the same
/// git repository produce a single deduplicated repository entry.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repository as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Both worktrees resolve to the same repository work directory, which
    // must be reported exactly once.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
11030
/// Verifies that saving a buffer under a new path (`save_buffer_as`) updates
/// its unstaged and uncommitted diff bases to the staged/committed contents of
/// the *new* path, via the `BufferChangedFilePath` event.
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct contents for each (file, stage) combination so the assertions
    // can tell exactly which base text a diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Replace the whole buffer so there is guaranteed to be a diff.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // An uncommitted diff opened after the rename should also use file_2's
    // committed contents as its base.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11144
11145async fn search(
11146 project: &Entity<Project>,
11147 query: SearchQuery,
11148 cx: &mut gpui::TestAppContext,
11149) -> Result<HashMap<String, Vec<Range<usize>>>> {
11150 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11151 let mut results = HashMap::default();
11152 while let Ok(search_result) = search_rx.rx.recv().await {
11153 match search_result {
11154 SearchResult::Buffer { buffer, ranges } => {
11155 results.entry(buffer).or_insert(ranges);
11156 }
11157 SearchResult::LimitReached => {}
11158 }
11159 }
11160 Ok(results
11161 .into_iter()
11162 .map(|(buffer, ranges)| {
11163 buffer.update(cx, |buffer, cx| {
11164 let path = buffer
11165 .file()
11166 .unwrap()
11167 .full_path(cx)
11168 .to_string_lossy()
11169 .to_string();
11170 let ranges = ranges
11171 .into_iter()
11172 .map(|range| range.to_offset(buffer))
11173 .collect::<Vec<_>>();
11174 (path, ranges)
11175 })
11176 })
11177 .collect())
11178}
11179
/// Verifies that reloading a buffer with a different encoding is undoable and
/// redoable: the encoding, text, and clean (non-dirty) state all round-trip
/// through undo/redo.
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    // Sanity-check the initial state: UTF-8, "Hi", not dirty.
    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    // Undo restores the original encoding and text without dirtying the buffer.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    // Redo re-applies the UTF-16LE interpretation, again without dirtying.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11243
11244pub fn init_test(cx: &mut gpui::TestAppContext) {
11245 zlog::init_test();
11246
11247 cx.update(|cx| {
11248 let settings_store = SettingsStore::test(cx);
11249 cx.set_global(settings_store);
11250 release_channel::init(semver::Version::new(0, 0, 0), cx);
11251 });
11252}
11253
11254fn json_lang() -> Arc<Language> {
11255 Arc::new(Language::new(
11256 LanguageConfig {
11257 name: "JSON".into(),
11258 matcher: LanguageMatcher {
11259 path_suffixes: vec!["json".to_string()],
11260 ..Default::default()
11261 },
11262 ..Default::default()
11263 },
11264 None,
11265 ))
11266}
11267
11268fn js_lang() -> Arc<Language> {
11269 Arc::new(Language::new(
11270 LanguageConfig {
11271 name: "JavaScript".into(),
11272 matcher: LanguageMatcher {
11273 path_suffixes: vec!["js".to_string()],
11274 ..Default::default()
11275 },
11276 ..Default::default()
11277 },
11278 None,
11279 ))
11280}
11281
/// Builds a "Python" language fixture (no parsing grammar) whose toolchain
/// lister reports a `.venv` directory found in any ancestor of the queried
/// subroot path, checked against the given fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Stub lister that only consults the fake FS; "Moot" because it performs
    // no real toolchain discovery.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                // One candidate toolchain per ancestor directory that contains
                // a `.venv` on the fake filesystem.
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is not needed by these tests and always fails.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for the fake toolchains.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
11356
11357fn typescript_lang() -> Arc<Language> {
11358 Arc::new(Language::new(
11359 LanguageConfig {
11360 name: "TypeScript".into(),
11361 matcher: LanguageMatcher {
11362 path_suffixes: vec!["ts".to_string()],
11363 ..Default::default()
11364 },
11365 ..Default::default()
11366 },
11367 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
11368 ))
11369}
11370
11371fn tsx_lang() -> Arc<Language> {
11372 Arc::new(Language::new(
11373 LanguageConfig {
11374 name: "tsx".into(),
11375 matcher: LanguageMatcher {
11376 path_suffixes: vec!["tsx".to_string()],
11377 ..Default::default()
11378 },
11379 ..Default::default()
11380 },
11381 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
11382 ))
11383}
11384
11385fn get_all_tasks(
11386 project: &Entity<Project>,
11387 task_contexts: Arc<TaskContexts>,
11388 cx: &mut App,
11389) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
11390 let new_tasks = project.update(cx, |project, cx| {
11391 project.task_store().update(cx, |task_store, cx| {
11392 task_store.task_inventory().unwrap().update(cx, |this, cx| {
11393 this.used_and_current_resolved_tasks(task_contexts, cx)
11394 })
11395 })
11396 });
11397
11398 cx.background_spawn(async move {
11399 let (mut old, new) = new_tasks.await;
11400 old.extend(new);
11401 old
11402 })
11403}
11404
11405#[track_caller]
11406fn assert_entry_git_state(
11407 tree: &Worktree,
11408 repository: &Repository,
11409 path: &str,
11410 index_status: Option<StatusCode>,
11411 is_ignored: bool,
11412) {
11413 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
11414 let entry = tree
11415 .entry_for_path(&rel_path(path))
11416 .unwrap_or_else(|| panic!("entry {path} not found"));
11417 let status = repository
11418 .status_for_path(&repo_path(path))
11419 .map(|entry| entry.status);
11420 let expected = index_status.map(|index_status| {
11421 TrackedStatus {
11422 index_status,
11423 worktree_status: StatusCode::Unmodified,
11424 }
11425 .into()
11426 });
11427 assert_eq!(
11428 status, expected,
11429 "expected {path} to have git status: {expected:?}"
11430 );
11431 assert_eq!(
11432 entry.is_ignored, is_ignored,
11433 "expected {path} to have is_ignored: {is_ignored}"
11434 );
11435}
11436
11437#[track_caller]
11438fn git_init(path: &Path) -> git2::Repository {
11439 let mut init_opts = RepositoryInitOptions::new();
11440 init_opts.initial_head("main");
11441 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
11442}
11443
11444#[track_caller]
11445fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
11446 let path = path.as_ref();
11447 let mut index = repo.index().expect("Failed to get index");
11448 index.add_path(path).expect("Failed to add file");
11449 index.write().expect("Failed to write index");
11450}
11451
11452#[track_caller]
11453fn git_remove_index(path: &Path, repo: &git2::Repository) {
11454 let mut index = repo.index().expect("Failed to get index");
11455 index.remove_path(path).expect("Failed to add file");
11456 index.write().expect("Failed to write index");
11457}
11458
11459#[track_caller]
11460fn git_commit(msg: &'static str, repo: &git2::Repository) {
11461 use git2::Signature;
11462
11463 let signature = Signature::now("test", "test@zed.dev").unwrap();
11464 let oid = repo.index().unwrap().write_tree().unwrap();
11465 let tree = repo.find_tree(oid).unwrap();
11466 if let Ok(head) = repo.head() {
11467 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
11468
11469 let parent_commit = parent_obj.as_commit().unwrap();
11470
11471 repo.commit(
11472 Some("HEAD"),
11473 &signature,
11474 &signature,
11475 msg,
11476 &tree,
11477 &[parent_commit],
11478 )
11479 .expect("Failed to commit with parent");
11480 } else {
11481 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
11482 .expect("Failed to commit");
11483 }
11484}
11485
/// Cherry-picks `commit` onto the current HEAD with default options.
/// Currently unused; compiled out via `cfg(any())`.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None)
        .expect("Failed to cherrypick");
}
11491
11492#[track_caller]
11493fn git_stash(repo: &mut git2::Repository) {
11494 use git2::Signature;
11495
11496 let signature = Signature::now("test", "test@zed.dev").unwrap();
11497 repo.stash_save(&signature, "N/A", None)
11498 .expect("Failed to stash");
11499}
11500
11501#[track_caller]
11502fn git_reset(offset: usize, repo: &git2::Repository) {
11503 let head = repo.head().expect("Couldn't get repo head");
11504 let object = head.peel(git2::ObjectType::Commit).unwrap();
11505 let commit = object.as_commit().unwrap();
11506 let new_head = commit
11507 .parents()
11508 .inspect(|parnet| {
11509 parnet.message();
11510 })
11511 .nth(offset)
11512 .expect("Not enough history");
11513 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
11514 .expect("Could not reset");
11515}
11516
/// Creates a branch named `name` pointing at the current HEAD commit,
/// without switching to it. Currently unused; compiled out via `cfg(any())`.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fix: the expect message previously said "Failed to commit", copied from
    // `git_commit`, even though this call creates a branch.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
11527
/// Points HEAD at `name` and checks out the corresponding tree.
/// Currently unused; compiled out via `cfg(any())`.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None)
        .expect("Failed to check out head");
}
11534
/// Returns the repository's status entries keyed by path.
/// Currently unused; compiled out via `cfg(any())`.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    let mut map = collections::HashMap::default();
    for entry in statuses.iter() {
        map.insert(entry.path().unwrap().to_string(), entry.status());
    }
    map
}
11544
// Verifies `Project::find_project_path` with absolute paths: paths inside a
// worktree resolve to that worktree's id plus a relative path (even for files
// that don't exist yet), while paths outside every worktree resolve to `None`.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two sibling worktrees, so we can check resolution picks the right one.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root path and id for the assertions.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // File at a worktree root resolves to that worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested file resolves with its full relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // File in the second worktree resolves to the second worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // Resolution is path-based, not existence-based: a nonexistent file
        // under a worktree still yields a project path.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
11628
// Verifies that removing worktrees keeps the git store consistent: a nested
// worktree's removal doesn't drop repositories owned by other worktrees, and
// the active repository falls over to a remaining one (or `None` when all
// worktrees are gone).
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Note: `/root/b/script` is a worktree nested inside repository `b`.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Two `.git` directories -> two repositories.
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the nested `script` worktree must not remove repository `b`,
    // which is still covered by the `/root/b` worktree.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing the worktree that owns the active repository should switch the
    // active repository to the remaining one.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the last worktree leaves no active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
11741
// Verifies the optimistic-staging UI flow: while a stage operation is in
// flight, the hunk's secondary status transitions
// `HasSecondaryHunk` -> `SecondaryHunkRemovalPending` -> `NoSecondaryHunk`,
// and a subsequent "commit" (HEAD update) clears the hunk entirely.
// NOTE: this test deliberately single-steps the executor instead of calling
// `run_until_parked`, so it depends on exact task scheduling order.
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index both hold the committed contents, so the on-disk edit
    // ("two" -> "TWO") is a single unstaged modification.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // Mid-flight: the hunk must be optimistically marked as pending removal.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
11886
11887#[gpui::test]
11888async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
11889 init_test(cx);
11890
11891 // Configure read_only_files setting
11892 cx.update(|cx| {
11893 cx.update_global::<SettingsStore, _>(|store, cx| {
11894 store.update_user_settings(cx, |settings| {
11895 settings.project.worktree.read_only_files = Some(vec![
11896 "**/generated/**".to_string(),
11897 "**/*.gen.rs".to_string(),
11898 ]);
11899 });
11900 });
11901 });
11902
11903 let fs = FakeFs::new(cx.background_executor.clone());
11904 fs.insert_tree(
11905 path!("/root"),
11906 json!({
11907 "src": {
11908 "main.rs": "fn main() {}",
11909 "types.gen.rs": "// Generated file",
11910 },
11911 "generated": {
11912 "schema.rs": "// Auto-generated schema",
11913 }
11914 }),
11915 )
11916 .await;
11917
11918 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11919
11920 // Open a regular file - should be read-write
11921 let regular_buffer = project
11922 .update(cx, |project, cx| {
11923 project.open_local_buffer(path!("/root/src/main.rs"), cx)
11924 })
11925 .await
11926 .unwrap();
11927
11928 regular_buffer.read_with(cx, |buffer, _| {
11929 assert!(!buffer.read_only(), "Regular file should not be read-only");
11930 });
11931
11932 // Open a file matching *.gen.rs pattern - should be read-only
11933 let gen_buffer = project
11934 .update(cx, |project, cx| {
11935 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
11936 })
11937 .await
11938 .unwrap();
11939
11940 gen_buffer.read_with(cx, |buffer, _| {
11941 assert!(
11942 buffer.read_only(),
11943 "File matching *.gen.rs pattern should be read-only"
11944 );
11945 });
11946
11947 // Open a file in generated directory - should be read-only
11948 let generated_buffer = project
11949 .update(cx, |project, cx| {
11950 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
11951 })
11952 .await
11953 .unwrap();
11954
11955 generated_buffer.read_with(cx, |buffer, _| {
11956 assert!(
11957 buffer.read_only(),
11958 "File in generated directory should be read-only"
11959 );
11960 });
11961}
11962
11963#[gpui::test]
11964async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
11965 init_test(cx);
11966
11967 // Explicitly set read_only_files to empty (default behavior)
11968 cx.update(|cx| {
11969 cx.update_global::<SettingsStore, _>(|store, cx| {
11970 store.update_user_settings(cx, |settings| {
11971 settings.project.worktree.read_only_files = Some(vec![]);
11972 });
11973 });
11974 });
11975
11976 let fs = FakeFs::new(cx.background_executor.clone());
11977 fs.insert_tree(
11978 path!("/root"),
11979 json!({
11980 "src": {
11981 "main.rs": "fn main() {}",
11982 },
11983 "generated": {
11984 "schema.rs": "// Auto-generated schema",
11985 }
11986 }),
11987 )
11988 .await;
11989
11990 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11991
11992 // All files should be read-write when read_only_files is empty
11993 let main_buffer = project
11994 .update(cx, |project, cx| {
11995 project.open_local_buffer(path!("/root/src/main.rs"), cx)
11996 })
11997 .await
11998 .unwrap();
11999
12000 main_buffer.read_with(cx, |buffer, _| {
12001 assert!(
12002 !buffer.read_only(),
12003 "Files should not be read-only when read_only_files is empty"
12004 );
12005 });
12006
12007 let generated_buffer = project
12008 .update(cx, |project, cx| {
12009 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12010 })
12011 .await
12012 .unwrap();
12013
12014 generated_buffer.read_with(cx, |buffer, _| {
12015 assert!(
12016 !buffer.read_only(),
12017 "Generated files should not be read-only when read_only_files is empty"
12018 );
12019 });
12020}
12021
12022#[gpui::test]
12023async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
12024 init_test(cx);
12025
12026 // Configure to make lock files read-only
12027 cx.update(|cx| {
12028 cx.update_global::<SettingsStore, _>(|store, cx| {
12029 store.update_user_settings(cx, |settings| {
12030 settings.project.worktree.read_only_files = Some(vec![
12031 "**/*.lock".to_string(),
12032 "**/package-lock.json".to_string(),
12033 ]);
12034 });
12035 });
12036 });
12037
12038 let fs = FakeFs::new(cx.background_executor.clone());
12039 fs.insert_tree(
12040 path!("/root"),
12041 json!({
12042 "Cargo.lock": "# Lock file",
12043 "Cargo.toml": "[package]",
12044 "package-lock.json": "{}",
12045 "package.json": "{}",
12046 }),
12047 )
12048 .await;
12049
12050 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12051
12052 // Cargo.lock should be read-only
12053 let cargo_lock = project
12054 .update(cx, |project, cx| {
12055 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
12056 })
12057 .await
12058 .unwrap();
12059
12060 cargo_lock.read_with(cx, |buffer, _| {
12061 assert!(buffer.read_only(), "Cargo.lock should be read-only");
12062 });
12063
12064 // Cargo.toml should be read-write
12065 let cargo_toml = project
12066 .update(cx, |project, cx| {
12067 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
12068 })
12069 .await
12070 .unwrap();
12071
12072 cargo_toml.read_with(cx, |buffer, _| {
12073 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
12074 });
12075
12076 // package-lock.json should be read-only
12077 let package_lock = project
12078 .update(cx, |project, cx| {
12079 project.open_local_buffer(path!("/root/package-lock.json"), cx)
12080 })
12081 .await
12082 .unwrap();
12083
12084 package_lock.read_with(cx, |buffer, _| {
12085 assert!(buffer.read_only(), "package-lock.json should be read-only");
12086 });
12087
12088 // package.json should be read-write
12089 let package_json = project
12090 .update(cx, |project, cx| {
12091 project.open_local_buffer(path!("/root/package.json"), cx)
12092 })
12093 .await
12094 .unwrap();
12095
12096 package_json.read_with(cx, |buffer, _| {
12097 assert!(!buffer.read_only(), "package.json should not be read-only");
12098 });
12099}
12100
12101mod disable_ai_settings_tests {
12102 use gpui::TestAppContext;
12103 use project::*;
12104 use settings::{Settings, SettingsStore};
12105
12106 #[gpui::test]
12107 async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
12108 cx.update(|cx| {
12109 settings::init(cx);
12110
12111 // Test 1: Default is false (AI enabled)
12112 assert!(
12113 !DisableAiSettings::get_global(cx).disable_ai,
12114 "Default should allow AI"
12115 );
12116 });
12117
12118 let disable_true = serde_json::json!({
12119 "disable_ai": true
12120 })
12121 .to_string();
12122 let disable_false = serde_json::json!({
12123 "disable_ai": false
12124 })
12125 .to_string();
12126
12127 cx.update_global::<SettingsStore, _>(|store, cx| {
12128 store.set_user_settings(&disable_false, cx).unwrap();
12129 store.set_global_settings(&disable_true, cx).unwrap();
12130 });
12131 cx.update(|cx| {
12132 assert!(
12133 DisableAiSettings::get_global(cx).disable_ai,
12134 "Local false cannot override global true"
12135 );
12136 });
12137
12138 cx.update_global::<SettingsStore, _>(|store, cx| {
12139 store.set_global_settings(&disable_false, cx).unwrap();
12140 store.set_user_settings(&disable_true, cx).unwrap();
12141 });
12142
12143 cx.update(|cx| {
12144 assert!(
12145 DisableAiSettings::get_global(cx).disable_ai,
12146 "Local false cannot override global true"
12147 );
12148 });
12149 }
12150
12151 #[gpui::test]
12152 async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
12153 use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
12154 use worktree::WorktreeId;
12155
12156 cx.update(|cx| {
12157 settings::init(cx);
12158
12159 // Default should allow AI
12160 assert!(
12161 !DisableAiSettings::get_global(cx).disable_ai,
12162 "Default should allow AI"
12163 );
12164 });
12165
12166 let worktree_id = WorktreeId::from_usize(1);
12167 let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
12168 std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
12169 };
12170 let project_path = rel_path("project");
12171 let settings_location = SettingsLocation {
12172 worktree_id,
12173 path: project_path.as_ref(),
12174 };
12175
12176 // Test: Project-level disable_ai=true should disable AI for files in that project
12177 cx.update_global::<SettingsStore, _>(|store, cx| {
12178 store
12179 .set_local_settings(
12180 worktree_id,
12181 LocalSettingsPath::InWorktree(project_path.clone()),
12182 LocalSettingsKind::Settings,
12183 Some(r#"{ "disable_ai": true }"#),
12184 cx,
12185 )
12186 .unwrap();
12187 });
12188
12189 cx.update(|cx| {
12190 let settings = DisableAiSettings::get(Some(settings_location), cx);
12191 assert!(
12192 settings.disable_ai,
12193 "Project-level disable_ai=true should disable AI for files in that project"
12194 );
12195 // Global should now also be true since project-level disable_ai is merged into global
12196 assert!(
12197 DisableAiSettings::get_global(cx).disable_ai,
12198 "Global setting should be affected by project-level disable_ai=true"
12199 );
12200 });
12201
12202 // Test: Setting project-level to false should allow AI for that project
12203 cx.update_global::<SettingsStore, _>(|store, cx| {
12204 store
12205 .set_local_settings(
12206 worktree_id,
12207 LocalSettingsPath::InWorktree(project_path.clone()),
12208 LocalSettingsKind::Settings,
12209 Some(r#"{ "disable_ai": false }"#),
12210 cx,
12211 )
12212 .unwrap();
12213 });
12214
12215 cx.update(|cx| {
12216 let settings = DisableAiSettings::get(Some(settings_location), cx);
12217 assert!(
12218 !settings.disable_ai,
12219 "Project-level disable_ai=false should allow AI"
12220 );
12221 // Global should also be false now
12222 assert!(
12223 !DisableAiSettings::get_global(cx).disable_ai,
12224 "Global setting should be false when project-level is false"
12225 );
12226 });
12227
12228 // Test: User-level true + project-level false = AI disabled (saturation)
12229 let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
12230 cx.update_global::<SettingsStore, _>(|store, cx| {
12231 store.set_user_settings(&disable_true, cx).unwrap();
12232 store
12233 .set_local_settings(
12234 worktree_id,
12235 LocalSettingsPath::InWorktree(project_path.clone()),
12236 LocalSettingsKind::Settings,
12237 Some(r#"{ "disable_ai": false }"#),
12238 cx,
12239 )
12240 .unwrap();
12241 });
12242
12243 cx.update(|cx| {
12244 let settings = DisableAiSettings::get(Some(settings_location), cx);
12245 assert!(
12246 settings.disable_ai,
12247 "Project-level false cannot override user-level true (SaturatingBool)"
12248 );
12249 });
12250 }
12251}