1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::FakeFs;
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
45 LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
46 ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
47 language_settings::{LanguageSettingsContent, language_settings},
48 markdown_lang, rust_lang, tree_sitter_typescript,
49};
50use lsp::{
51 CodeActionKind, DiagnosticSeverity, DocumentChanges, FileOperationFilter, LanguageServerId,
52 LanguageServerName, NumberOrString, TextDocumentEdit, Uri, WillRenameFiles,
53 notification::DidRenameFiles,
54};
55use parking_lot::Mutex;
56use paths::{config_dir, global_gitignore_path, tasks_file};
57use postage::stream::Stream as _;
58use pretty_assertions::{assert_eq, assert_matches};
59use project::{
60 Event, TaskContexts,
61 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
62 search::{SearchQuery, SearchResult},
63 task_store::{TaskSettingsLocation, TaskStore},
64 *,
65};
66use rand::{Rng as _, rngs::StdRng};
67use serde_json::json;
68use settings::SettingsStore;
69#[cfg(not(windows))]
70use std::os;
71use std::{
72 cell::RefCell,
73 env, mem,
74 num::NonZeroU32,
75 ops::Range,
76 path::{Path, PathBuf},
77 rc::Rc,
78 str::FromStr,
79 sync::{Arc, OnceLock},
80 task::Poll,
81 time::Duration,
82};
83use sum_tree::SumTree;
84use task::{ResolvedTask, ShellKind, TaskContext};
85use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
86use unindent::Unindent as _;
87use util::{
88 TryFutureExt as _, assert_set_eq, maybe, path,
89 paths::{PathMatcher, PathStyle},
90 rel_path::{RelPath, rel_path},
91 test::{TempTree, marked_text_offsets},
92 uri,
93};
94use worktree::WorktreeModelHandle as _;
95
96#[gpui::test]
97async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
98 cx.executor().allow_parking();
99
100 let (tx, mut rx) = futures::channel::mpsc::unbounded();
101 let _thread = std::thread::spawn(move || {
102 #[cfg(not(target_os = "windows"))]
103 std::fs::metadata("/tmp").unwrap();
104 #[cfg(target_os = "windows")]
105 std::fs::metadata("C:/Windows").unwrap();
106 std::thread::sleep(Duration::from_millis(1000));
107 tx.unbounded_send(1).unwrap();
108 });
109 rx.next().await.unwrap();
110}
111
112#[gpui::test]
113async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
114 cx.executor().allow_parking();
115
116 let io_task = smol::unblock(move || {
117 println!("sleeping on thread {:?}", std::thread::current().id());
118 std::thread::sleep(Duration::from_millis(10));
119 1
120 });
121
122 let task = cx.foreground_executor().spawn(async move {
123 io_task.await;
124 });
125
126 task.await;
127}
128
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
// we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    // Verifies that a worktree opened through a symlinked root resolves its
    // contents, and that a symlinked directory inside the tree surfaces the
    // same underlying entries (same inode) as its target directory.
    init_test(cx);
    // Uses the real filesystem below, which requires parking.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // `root_link` -> `root`, and `root/finnochio` -> `root/fennel`.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root, on the real filesystem.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        assert_eq!(tree.file_count(), 5);
        // The symlinked directory and its target must resolve to the same inode.
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
179
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // End-to-end check of `.editorconfig` handling:
    //  - editorconfig values override `.zed/settings.json`,
    //  - a nested `.editorconfig` overrides the root one,
    //  - `tab_width` is used when `indent_size` is absent,
    //  - `off` values fall back to Zed's own settings,
    //  - non-matching globs leave files unaffected.
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n    A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n    B\n}",
        },
        "c.js": "def c\n    C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the temp tree into a FakeFs so the project watches fake paths.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let worktree scanning and settings observation settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
278
#[gpui::test]
async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies that `.editorconfig` files *outside* the worktree root are
    // discovered by walking up the directory tree, and that the nearest
    // config whose glob matches a given file wins.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "parent": {
                ".editorconfig": "[*.rs]\nindent_size = 2\n",
                "worktree": {
                    ".editorconfig": "[*.md]\nindent_size = 3\n",
                    "main.rs": "fn main() {}",
                    "README.md": "# README",
                    "other.txt": "other content",
                }
            }
        }),
    )
    .await;

    // The worktree root is two levels below the outermost config.
    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    language_registry.add(markdown_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let worktree scanning and settings observation settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_rs = settings_for("main.rs");
        let settings_md = settings_for("README.md");
        let settings_txt = settings_for("other.txt");

        // main.rs gets indent_size = 2 from parent's external .editorconfig
        assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));

        // README.md gets indent_size = 3 from internal worktree .editorconfig
        assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));

        // other.txt gets indent_size = 4 from grandparent's external .editorconfig
        assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
    });
}
342
343#[gpui::test]
344async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
345 init_test(cx);
346
347 let fs = FakeFs::new(cx.executor());
348 fs.insert_tree(
349 path!("/parent"),
350 json!({
351 ".editorconfig": "[*]\nindent_size = 99\n",
352 "worktree": {
353 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
354 "file.rs": "fn main() {}",
355 }
356 }),
357 )
358 .await;
359
360 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
361
362 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
363 language_registry.add(rust_lang());
364
365 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
366
367 cx.executor().run_until_parked();
368
369 cx.update(|cx| {
370 let tree = worktree.read(cx);
371 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
372 let file = File::for_entry(file_entry, worktree.clone());
373 let file_language = project
374 .read(cx)
375 .languages()
376 .load_language_for_file_path(file.path.as_std_path());
377 let file_language = cx
378 .foreground_executor()
379 .block_on(file_language)
380 .expect("Failed to get file language");
381 let file = file as _;
382 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
383
384 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
385 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
386 });
387}
388
#[gpui::test]
async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
    // An *external* `.editorconfig` (in an ancestor of the worktree) that
    // declares `root = true` must stop the upward traversal, shadowing any
    // configs even further up.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "parent": {
                ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
                "worktree": {
                    "file.rs": "fn main() {}",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let worktree scanning and settings observation settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        // Resolve the effective settings for file.rs.
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
436
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    // Two sibling worktrees share one external `.editorconfig` in their common
    // parent; both must pick up its settings.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    // Open both siblings as separate worktrees of one project.
    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    // Let worktree scanning and settings observation settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        for worktree in worktrees {
            // Resolve the effective settings for each worktree's file.rs.
            let tree = worktree.read(cx);
            let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            let settings =
                language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        }
    });
}
498
#[gpui::test]
async fn test_external_editorconfig_not_loaded_without_internal_config(
    cx: &mut gpui::TestAppContext,
) {
    // External `.editorconfig` discovery is only triggered when the worktree
    // itself contains a `.editorconfig`; here it does not, so the parent's
    // config must be ignored.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "worktree": {
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let worktree scanning and settings observation settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        // Resolve the effective settings for file.rs.
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
        // because without an internal .editorconfig, external configs are not loaded
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
546
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    // Editing an *external* `.editorconfig` on disk must be observed (via the
    // file watcher) and cause the derived language settings to refresh.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let worktree scanning and settings observation settle.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        // Resolve the effective settings for file.rs.
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the external config on disk; the watcher should pick this up.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        // Re-resolve the settings after the on-disk change.
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
620
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    // A worktree added to an existing project must run external
    // `.editorconfig` discovery, not just the worktrees present at project
    // creation.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    // Start with only one of the two sibling directories as a worktree.
    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        // Resolve the effective settings in the original worktree.
        let worktree = project.read(cx).worktrees(cx).next().unwrap();
        let tree = worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Now attach the sibling directory as a second worktree.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        // Resolve the effective settings in the newly-added worktree.
        let tree = new_worktree.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, new_worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
697
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    // Removing a worktree must tear down its external-editorconfig state:
    // the per-worktree entry, the cached external configs, and the watchers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        // Inspect the editorconfig store's internal bookkeeping.
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
753
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    // When two worktrees share one external `.editorconfig`, removing one
    // worktree must NOT drop the shared config/watcher while the other
    // worktree still depends on it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    // Capture ids of both worktrees; keep a handle on worktree_b for later.
    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    cx.update(|cx| {
        // The surviving worktree must still resolve settings from the shared config.
        let tree = worktree_b.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree_b.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
851
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    // A `git_hosting_providers` entry in `.zed/settings.json` must register a
    // custom provider in the global registry, and removing the setting must
    // unregister it again.
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    cx.update(|cx| {
        // The provider named "foo" should now be registered.
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the project settings on disk.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    cx.update(|cx| {
        // With the setting removed, "foo" must be gone from the registry.
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
916
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Covers per-directory `.zed` settings and tasks:
    //  - nested `.zed/settings.json` overrides the worktree-root one,
    //  - tasks from both `.zed/tasks.json` files are surfaced,
    //  - scheduling a task and adding global tasks changes task ordering
    //    (recently-scheduled first, then worktree, then global).
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n    B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against this worktree's (default) task context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // The source kind of the worktree-root `.zed/tasks.json`.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            // First check the settings overrides, then collect all tasks.
            let tree = worktree.read(cx);

            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            // a/ inherits the root `.zed` settings; b/ has its own override.
            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree-level tasks are present; nested directory's task first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Schedule the root task, and add a global task via the inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store()
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            // Mark the root task as recently scheduled so it sorts first.
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Ordering now: recently-scheduled root task, then the other worktree
    // task, then the newly-added global task (with its env applied).
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
1117
// Regression test: saving a `.zed/tasks.json` that references an unknown task
// variable (here `$ZED_FOO`) must surface an `Event::Toast` whose link points
// at the tasks documentation, instead of failing silently.
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we had a chance to setup the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Flipped to `true` by the subscription below once the expected toast arrives.
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                // The toast must name the offending variable and link to the docs.
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    // Let the pending fs-change processing run; the toast should have fired by now.
    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1175
// Verifies task resolution against `TaskContexts`: a task whose command uses
// `$ZED_WORKTREE_ROOT` cannot resolve from an active-item context that carries
// no worktree data, but resolves (with the variable expanded) once a worktree
// context supplying `VariableName::WorktreeRoot` is present.
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Active-item context only, no worktree context: `$ZED_WORKTREE_ROOT`
    // cannot be substituted, so the task must not resolve.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Same query, but now a worktree context provides `WorktreeRoot` = "/dir":
    // the task resolves and the variable expands into the command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
1267
// Two Python sub-projects (each rooted by a `pyproject.toml`) initially share a
// single "ty" language server instance. Activating a different toolchain for
// one sub-project must spawn a second, separate instance of the same server
// (observable as a new `LanguageServerId`).
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: roots a project at the nearest ancestor
    // directory that contains a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Walk up at most `depth` ancestors looking for the manifest file.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the same server instance,
    // since no toolchain differences exist yet.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain has been activated yet for project-b.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b; this should split the
    // shared server into a per-toolchain instance.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1469
// End-to-end exercise of language-server lifecycle management: lazy language
// assignment, per-language routing of open/change/save/close notifications,
// buffer re-routing when a rename changes the file's language, diagnostic
// clearing on language change, document-version reset on server change, and
// document reopening after a server restart.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-extension rename shows up as close(old uri) + open(new uri) on the
    // same (Rust) server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify below that it is cleared when the
    // buffer changes language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1872
1873#[gpui::test]
1874async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1875 init_test(cx);
1876
1877 let settings_json_contents = json!({
1878 "languages": {
1879 "Rust": {
1880 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1881 }
1882 },
1883 "lsp": {
1884 "my_fake_lsp": {
1885 "binary": {
1886 // file exists, so this is treated as a relative path
1887 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1888 }
1889 },
1890 "lsp_on_path": {
1891 "binary": {
1892 // file doesn't exist, so it will fall back on PATH env var
1893 "path": path!("lsp_on_path.exe").to_string(),
1894 }
1895 }
1896 },
1897 });
1898
1899 let fs = FakeFs::new(cx.executor());
1900 fs.insert_tree(
1901 path!("/the-root"),
1902 json!({
1903 ".zed": {
1904 "settings.json": settings_json_contents.to_string(),
1905 },
1906 ".relative_path": {
1907 "to": {
1908 "my_fake_lsp.exe": "",
1909 },
1910 },
1911 "src": {
1912 "main.rs": "",
1913 }
1914 }),
1915 )
1916 .await;
1917
1918 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1919 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1920 language_registry.add(rust_lang());
1921
1922 let mut my_fake_lsp = language_registry.register_fake_lsp(
1923 "Rust",
1924 FakeLspAdapter {
1925 name: "my_fake_lsp",
1926 ..Default::default()
1927 },
1928 );
1929 let mut lsp_on_path = language_registry.register_fake_lsp(
1930 "Rust",
1931 FakeLspAdapter {
1932 name: "lsp_on_path",
1933 ..Default::default()
1934 },
1935 );
1936
1937 cx.run_until_parked();
1938
1939 // Start the language server by opening a buffer with a compatible file extension.
1940 project
1941 .update(cx, |project, cx| {
1942 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
1943 })
1944 .await
1945 .unwrap();
1946
1947 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
1948 assert_eq!(
1949 lsp_path.to_string_lossy(),
1950 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
1951 );
1952
1953 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
1954 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
1955}
1956
1957#[gpui::test]
1958async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
1959 init_test(cx);
1960
1961 let settings_json_contents = json!({
1962 "languages": {
1963 "Rust": {
1964 "language_servers": ["tilde_lsp"]
1965 }
1966 },
1967 "lsp": {
1968 "tilde_lsp": {
1969 "binary": {
1970 "path": "~/.local/bin/rust-analyzer",
1971 }
1972 }
1973 },
1974 });
1975
1976 let fs = FakeFs::new(cx.executor());
1977 fs.insert_tree(
1978 path!("/root"),
1979 json!({
1980 ".zed": {
1981 "settings.json": settings_json_contents.to_string(),
1982 },
1983 "src": {
1984 "main.rs": "fn main() {}",
1985 }
1986 }),
1987 )
1988 .await;
1989
1990 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
1991 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1992 language_registry.add(rust_lang());
1993
1994 let mut tilde_lsp = language_registry.register_fake_lsp(
1995 "Rust",
1996 FakeLspAdapter {
1997 name: "tilde_lsp",
1998 ..Default::default()
1999 },
2000 );
2001 cx.run_until_parked();
2002
2003 project
2004 .update(cx, |project, cx| {
2005 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2006 })
2007 .await
2008 .unwrap();
2009
2010 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2011 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2012 assert_eq!(
2013 lsp_path, expected_path,
2014 "Tilde path should expand to home directory"
2015 );
2016}
2017
2018#[gpui::test]
2019async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2020 init_test(cx);
2021
2022 let fs = FakeFs::new(cx.executor());
2023 fs.insert_tree(
2024 path!("/the-root"),
2025 json!({
2026 ".gitignore": "target\n",
2027 "Cargo.lock": "",
2028 "src": {
2029 "a.rs": "",
2030 "b.rs": "",
2031 },
2032 "target": {
2033 "x": {
2034 "out": {
2035 "x.rs": ""
2036 }
2037 },
2038 "y": {
2039 "out": {
2040 "y.rs": "",
2041 }
2042 },
2043 "z": {
2044 "out": {
2045 "z.rs": ""
2046 }
2047 }
2048 }
2049 }),
2050 )
2051 .await;
2052 fs.insert_tree(
2053 path!("/the-registry"),
2054 json!({
2055 "dep1": {
2056 "src": {
2057 "dep1.rs": "",
2058 }
2059 },
2060 "dep2": {
2061 "src": {
2062 "dep2.rs": "",
2063 }
2064 },
2065 }),
2066 )
2067 .await;
2068 fs.insert_tree(
2069 path!("/the/stdlib"),
2070 json!({
2071 "LICENSE": "",
2072 "src": {
2073 "string.rs": "",
2074 }
2075 }),
2076 )
2077 .await;
2078
2079 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2080 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2081 (project.languages().clone(), project.lsp_store())
2082 });
2083 language_registry.add(rust_lang());
2084 let mut fake_servers = language_registry.register_fake_lsp(
2085 "Rust",
2086 FakeLspAdapter {
2087 name: "the-language-server",
2088 ..Default::default()
2089 },
2090 );
2091
2092 cx.executor().run_until_parked();
2093
2094 // Start the language server by opening a buffer with a compatible file extension.
2095 project
2096 .update(cx, |project, cx| {
2097 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2098 })
2099 .await
2100 .unwrap();
2101
2102 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2103 project.update(cx, |project, cx| {
2104 let worktree = project.worktrees(cx).next().unwrap();
2105 assert_eq!(
2106 worktree
2107 .read(cx)
2108 .snapshot()
2109 .entries(true, 0)
2110 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2111 .collect::<Vec<_>>(),
2112 &[
2113 ("", false),
2114 (".gitignore", false),
2115 ("Cargo.lock", false),
2116 ("src", false),
2117 ("src/a.rs", false),
2118 ("src/b.rs", false),
2119 ("target", true),
2120 ]
2121 );
2122 });
2123
2124 let prev_read_dir_count = fs.read_dir_call_count();
2125
2126 let fake_server = fake_servers.next().await.unwrap();
2127 cx.executor().run_until_parked();
2128 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2129 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2130 id
2131 });
2132
2133 // Simulate jumping to a definition in a dependency outside of the worktree.
2134 let _out_of_worktree_buffer = project
2135 .update(cx, |project, cx| {
2136 project.open_local_buffer_via_lsp(
2137 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2138 server_id,
2139 cx,
2140 )
2141 })
2142 .await
2143 .unwrap();
2144
2145 // Keep track of the FS events reported to the language server.
2146 let file_changes = Arc::new(Mutex::new(Vec::new()));
2147 fake_server
2148 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
2149 registrations: vec![lsp::Registration {
2150 id: Default::default(),
2151 method: "workspace/didChangeWatchedFiles".to_string(),
2152 register_options: serde_json::to_value(
2153 lsp::DidChangeWatchedFilesRegistrationOptions {
2154 watchers: vec![
2155 lsp::FileSystemWatcher {
2156 glob_pattern: lsp::GlobPattern::String(
2157 path!("/the-root/Cargo.toml").to_string(),
2158 ),
2159 kind: None,
2160 },
2161 lsp::FileSystemWatcher {
2162 glob_pattern: lsp::GlobPattern::String(
2163 path!("/the-root/src/*.{rs,c}").to_string(),
2164 ),
2165 kind: None,
2166 },
2167 lsp::FileSystemWatcher {
2168 glob_pattern: lsp::GlobPattern::String(
2169 path!("/the-root/target/y/**/*.rs").to_string(),
2170 ),
2171 kind: None,
2172 },
2173 lsp::FileSystemWatcher {
2174 glob_pattern: lsp::GlobPattern::String(
2175 path!("/the/stdlib/src/**/*.rs").to_string(),
2176 ),
2177 kind: None,
2178 },
2179 lsp::FileSystemWatcher {
2180 glob_pattern: lsp::GlobPattern::String(
2181 path!("**/Cargo.lock").to_string(),
2182 ),
2183 kind: None,
2184 },
2185 ],
2186 },
2187 )
2188 .ok(),
2189 }],
2190 })
2191 .await
2192 .into_response()
2193 .unwrap();
2194 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2195 let file_changes = file_changes.clone();
2196 move |params, _| {
2197 let mut file_changes = file_changes.lock();
2198 file_changes.extend(params.changes);
2199 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2200 }
2201 });
2202
2203 cx.executor().run_until_parked();
2204 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2205 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2206
2207 let mut new_watched_paths = fs.watched_paths();
2208 new_watched_paths.retain(|path| {
2209 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2210 });
2211 assert_eq!(
2212 &new_watched_paths,
2213 &[
2214 Path::new(path!("/the-root")),
2215 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2216 Path::new(path!("/the/stdlib/src"))
2217 ]
2218 );
2219
2220 // Now the language server has asked us to watch an ignored directory path,
2221 // so we recursively load it.
2222 project.update(cx, |project, cx| {
2223 let worktree = project.visible_worktrees(cx).next().unwrap();
2224 assert_eq!(
2225 worktree
2226 .read(cx)
2227 .snapshot()
2228 .entries(true, 0)
2229 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2230 .collect::<Vec<_>>(),
2231 &[
2232 ("", false),
2233 (".gitignore", false),
2234 ("Cargo.lock", false),
2235 ("src", false),
2236 ("src/a.rs", false),
2237 ("src/b.rs", false),
2238 ("target", true),
2239 ("target/x", true),
2240 ("target/y", true),
2241 ("target/y/out", true),
2242 ("target/y/out/y.rs", true),
2243 ("target/z", true),
2244 ]
2245 );
2246 });
2247
2248 // Perform some file system mutations, two of which match the watched patterns,
2249 // and one of which does not.
2250 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2251 .await
2252 .unwrap();
2253 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2254 .await
2255 .unwrap();
2256 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2257 .await
2258 .unwrap();
2259 fs.create_file(
2260 path!("/the-root/target/x/out/x2.rs").as_ref(),
2261 Default::default(),
2262 )
2263 .await
2264 .unwrap();
2265 fs.create_file(
2266 path!("/the-root/target/y/out/y2.rs").as_ref(),
2267 Default::default(),
2268 )
2269 .await
2270 .unwrap();
2271 fs.save(
2272 path!("/the-root/Cargo.lock").as_ref(),
2273 &"".into(),
2274 Default::default(),
2275 )
2276 .await
2277 .unwrap();
2278 fs.save(
2279 path!("/the-stdlib/LICENSE").as_ref(),
2280 &"".into(),
2281 Default::default(),
2282 )
2283 .await
2284 .unwrap();
2285 fs.save(
2286 path!("/the/stdlib/src/string.rs").as_ref(),
2287 &"".into(),
2288 Default::default(),
2289 )
2290 .await
2291 .unwrap();
2292
2293 // The language server receives events for the FS mutations that match its watch patterns.
2294 cx.executor().run_until_parked();
2295 assert_eq!(
2296 &*file_changes.lock(),
2297 &[
2298 lsp::FileEvent {
2299 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2300 typ: lsp::FileChangeType::CHANGED,
2301 },
2302 lsp::FileEvent {
2303 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2304 typ: lsp::FileChangeType::DELETED,
2305 },
2306 lsp::FileEvent {
2307 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2308 typ: lsp::FileChangeType::CREATED,
2309 },
2310 lsp::FileEvent {
2311 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2312 typ: lsp::FileChangeType::CREATED,
2313 },
2314 lsp::FileEvent {
2315 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2316 typ: lsp::FileChangeType::CHANGED,
2317 },
2318 ]
2319 );
2320}
2321
2322#[gpui::test]
2323async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2324 init_test(cx);
2325
2326 let fs = FakeFs::new(cx.executor());
2327 fs.insert_tree(
2328 path!("/dir"),
2329 json!({
2330 "a.rs": "let a = 1;",
2331 "b.rs": "let b = 2;"
2332 }),
2333 )
2334 .await;
2335
2336 let project = Project::test(
2337 fs,
2338 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2339 cx,
2340 )
2341 .await;
2342 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2343
2344 let buffer_a = project
2345 .update(cx, |project, cx| {
2346 project.open_local_buffer(path!("/dir/a.rs"), cx)
2347 })
2348 .await
2349 .unwrap();
2350 let buffer_b = project
2351 .update(cx, |project, cx| {
2352 project.open_local_buffer(path!("/dir/b.rs"), cx)
2353 })
2354 .await
2355 .unwrap();
2356
2357 lsp_store.update(cx, |lsp_store, cx| {
2358 lsp_store
2359 .update_diagnostics(
2360 LanguageServerId(0),
2361 lsp::PublishDiagnosticsParams {
2362 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2363 version: None,
2364 diagnostics: vec![lsp::Diagnostic {
2365 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2366 severity: Some(lsp::DiagnosticSeverity::ERROR),
2367 message: "error 1".to_string(),
2368 ..Default::default()
2369 }],
2370 },
2371 None,
2372 DiagnosticSourceKind::Pushed,
2373 &[],
2374 cx,
2375 )
2376 .unwrap();
2377 lsp_store
2378 .update_diagnostics(
2379 LanguageServerId(0),
2380 lsp::PublishDiagnosticsParams {
2381 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2382 version: None,
2383 diagnostics: vec![lsp::Diagnostic {
2384 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2385 severity: Some(DiagnosticSeverity::WARNING),
2386 message: "error 2".to_string(),
2387 ..Default::default()
2388 }],
2389 },
2390 None,
2391 DiagnosticSourceKind::Pushed,
2392 &[],
2393 cx,
2394 )
2395 .unwrap();
2396 });
2397
2398 buffer_a.update(cx, |buffer, _| {
2399 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2400 assert_eq!(
2401 chunks
2402 .iter()
2403 .map(|(s, d)| (s.as_str(), *d))
2404 .collect::<Vec<_>>(),
2405 &[
2406 ("let ", None),
2407 ("a", Some(DiagnosticSeverity::ERROR)),
2408 (" = 1;", None),
2409 ]
2410 );
2411 });
2412 buffer_b.update(cx, |buffer, _| {
2413 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2414 assert_eq!(
2415 chunks
2416 .iter()
2417 .map(|(s, d)| (s.as_str(), *d))
2418 .collect::<Vec<_>>(),
2419 &[
2420 ("let ", None),
2421 ("b", Some(DiagnosticSeverity::WARNING)),
2422 (" = 2;", None),
2423 ]
2424 );
2425 });
2426}
2427
2428#[gpui::test]
2429async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2430 init_test(cx);
2431
2432 let fs = FakeFs::new(cx.executor());
2433 fs.insert_tree(
2434 path!("/root"),
2435 json!({
2436 "dir": {
2437 ".git": {
2438 "HEAD": "ref: refs/heads/main",
2439 },
2440 ".gitignore": "b.rs",
2441 "a.rs": "let a = 1;",
2442 "b.rs": "let b = 2;",
2443 },
2444 "other.rs": "let b = c;"
2445 }),
2446 )
2447 .await;
2448
2449 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2450 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2451 let (worktree, _) = project
2452 .update(cx, |project, cx| {
2453 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2454 })
2455 .await
2456 .unwrap();
2457 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2458
2459 let (worktree, _) = project
2460 .update(cx, |project, cx| {
2461 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2462 })
2463 .await
2464 .unwrap();
2465 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2466
2467 let server_id = LanguageServerId(0);
2468 lsp_store.update(cx, |lsp_store, cx| {
2469 lsp_store
2470 .update_diagnostics(
2471 server_id,
2472 lsp::PublishDiagnosticsParams {
2473 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2474 version: None,
2475 diagnostics: vec![lsp::Diagnostic {
2476 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2477 severity: Some(lsp::DiagnosticSeverity::ERROR),
2478 message: "unused variable 'b'".to_string(),
2479 ..Default::default()
2480 }],
2481 },
2482 None,
2483 DiagnosticSourceKind::Pushed,
2484 &[],
2485 cx,
2486 )
2487 .unwrap();
2488 lsp_store
2489 .update_diagnostics(
2490 server_id,
2491 lsp::PublishDiagnosticsParams {
2492 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2493 version: None,
2494 diagnostics: vec![lsp::Diagnostic {
2495 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2496 severity: Some(lsp::DiagnosticSeverity::ERROR),
2497 message: "unknown variable 'c'".to_string(),
2498 ..Default::default()
2499 }],
2500 },
2501 None,
2502 DiagnosticSourceKind::Pushed,
2503 &[],
2504 cx,
2505 )
2506 .unwrap();
2507 });
2508
2509 let main_ignored_buffer = project
2510 .update(cx, |project, cx| {
2511 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2512 })
2513 .await
2514 .unwrap();
2515 main_ignored_buffer.update(cx, |buffer, _| {
2516 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2517 assert_eq!(
2518 chunks
2519 .iter()
2520 .map(|(s, d)| (s.as_str(), *d))
2521 .collect::<Vec<_>>(),
2522 &[
2523 ("let ", None),
2524 ("b", Some(DiagnosticSeverity::ERROR)),
2525 (" = 2;", None),
2526 ],
2527 "Gigitnored buffers should still get in-buffer diagnostics",
2528 );
2529 });
2530 let other_buffer = project
2531 .update(cx, |project, cx| {
2532 project.open_buffer((other_worktree_id, rel_path("")), cx)
2533 })
2534 .await
2535 .unwrap();
2536 other_buffer.update(cx, |buffer, _| {
2537 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2538 assert_eq!(
2539 chunks
2540 .iter()
2541 .map(|(s, d)| (s.as_str(), *d))
2542 .collect::<Vec<_>>(),
2543 &[
2544 ("let b = ", None),
2545 ("c", Some(DiagnosticSeverity::ERROR)),
2546 (";", None),
2547 ],
2548 "Buffers from hidden projects should still get in-buffer diagnostics"
2549 );
2550 });
2551
2552 project.update(cx, |project, cx| {
2553 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2554 assert_eq!(
2555 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2556 vec![(
2557 ProjectPath {
2558 worktree_id: main_worktree_id,
2559 path: rel_path("b.rs").into(),
2560 },
2561 server_id,
2562 DiagnosticSummary {
2563 error_count: 1,
2564 warning_count: 0,
2565 }
2566 )]
2567 );
2568 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2569 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2570 });
2571}
2572
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies that work-done progress reported under the adapter's
    // `disk_based_diagnostics_progress_token` is translated into
    // DiskBasedDiagnosticsStarted/Finished project events, with
    // DiagnosticsUpdated events emitted in between for pushed diagnostics.
    // The exact ordering of `events.next()` calls vs. server actions is
    // load-bearing here.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Progress beginning under the disk-based token (with a "/0" suffix) maps
    // to the DiskBasedDiagnosticsStarted event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Diagnostics pushed while progress is running produce a
    // DiagnosticsUpdated event for the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the progress yields DiskBasedDiagnosticsFinished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The pushed diagnostic is visible in the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // A second identical (still-empty) publish must be a no-op: after the
    // executor settles, the event stream has nothing more to yield.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2708
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restarting a language server while its disk-based diagnostics progress
    // is still open must not leave the project stuck in the "diagnostics
    // running" state: the old server's unfinished progress is discarded and
    // only the new server's progress is tracked.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The restart surfaces as removal of server 0 followed by addition of
    // server 1.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The still-open buffer gets re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server counts as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2810
2811#[gpui::test]
2812async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2813 init_test(cx);
2814
2815 let fs = FakeFs::new(cx.executor());
2816 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2817
2818 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2819
2820 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2821 language_registry.add(rust_lang());
2822 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2823
2824 let (buffer, _) = project
2825 .update(cx, |project, cx| {
2826 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2827 })
2828 .await
2829 .unwrap();
2830
2831 // Publish diagnostics
2832 let fake_server = fake_servers.next().await.unwrap();
2833 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2834 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2835 version: None,
2836 diagnostics: vec![lsp::Diagnostic {
2837 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2838 severity: Some(lsp::DiagnosticSeverity::ERROR),
2839 message: "the message".to_string(),
2840 ..Default::default()
2841 }],
2842 });
2843
2844 cx.executor().run_until_parked();
2845 buffer.update(cx, |buffer, _| {
2846 assert_eq!(
2847 buffer
2848 .snapshot()
2849 .diagnostics_in_range::<_, usize>(0..1, false)
2850 .map(|entry| entry.diagnostic.message.clone())
2851 .collect::<Vec<_>>(),
2852 ["the message".to_string()]
2853 );
2854 });
2855 project.update(cx, |project, cx| {
2856 assert_eq!(
2857 project.diagnostic_summary(false, cx),
2858 DiagnosticSummary {
2859 error_count: 1,
2860 warning_count: 0,
2861 }
2862 );
2863 });
2864
2865 project.update(cx, |project, cx| {
2866 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2867 });
2868
2869 // The diagnostics are cleared.
2870 cx.executor().run_until_parked();
2871 buffer.update(cx, |buffer, _| {
2872 assert_eq!(
2873 buffer
2874 .snapshot()
2875 .diagnostics_in_range::<_, usize>(0..1, false)
2876 .map(|entry| entry.diagnostic.message.clone())
2877 .collect::<Vec<_>>(),
2878 Vec::<String>::new(),
2879 );
2880 });
2881 project.update(cx, |project, cx| {
2882 assert_eq!(
2883 project.diagnostic_summary(false, cx),
2884 DiagnosticSummary {
2885 error_count: 0,
2886 warning_count: 0,
2887 }
2888 );
2889 });
2890}
2891
2892#[gpui::test]
2893async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2894 init_test(cx);
2895
2896 let fs = FakeFs::new(cx.executor());
2897 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2898
2899 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2900 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2901
2902 language_registry.add(rust_lang());
2903 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2904
2905 let (buffer, _handle) = project
2906 .update(cx, |project, cx| {
2907 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2908 })
2909 .await
2910 .unwrap();
2911
2912 // Before restarting the server, report diagnostics with an unknown buffer version.
2913 let fake_server = fake_servers.next().await.unwrap();
2914 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2915 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2916 version: Some(10000),
2917 diagnostics: Vec::new(),
2918 });
2919 cx.executor().run_until_parked();
2920 project.update(cx, |project, cx| {
2921 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2922 });
2923
2924 let mut fake_server = fake_servers.next().await.unwrap();
2925 let notification = fake_server
2926 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2927 .await
2928 .text_document;
2929 assert_eq!(notification.version, 0);
2930}
2931
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Cancelling language-server work for a buffer should emit a
    // WorkDoneProgressCancel notification — but only for progress tokens the
    // server marked as cancellable.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // This token is explicitly NOT cancellable; no cancel notification is
    // expected for it below.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // This token IS cancellable, so it is the one the cancel targets.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Only the cancellable token gets cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3001
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Toggling `enable_language_server` per-language in the user settings
    // should stop/start only the corresponding server, leaving other
    // languages' servers untouched.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening a buffer of each language starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The disabled Rust server exits.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance re-opens the Rust buffer…
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // …and the JavaScript server shuts down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3119
3120#[gpui::test(iterations = 3)]
3121async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3122 init_test(cx);
3123
3124 let text = "
3125 fn a() { A }
3126 fn b() { BB }
3127 fn c() { CCC }
3128 "
3129 .unindent();
3130
3131 let fs = FakeFs::new(cx.executor());
3132 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3133
3134 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3135 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3136
3137 language_registry.add(rust_lang());
3138 let mut fake_servers = language_registry.register_fake_lsp(
3139 "Rust",
3140 FakeLspAdapter {
3141 disk_based_diagnostics_sources: vec!["disk".into()],
3142 ..Default::default()
3143 },
3144 );
3145
3146 let buffer = project
3147 .update(cx, |project, cx| {
3148 project.open_local_buffer(path!("/dir/a.rs"), cx)
3149 })
3150 .await
3151 .unwrap();
3152
3153 let _handle = project.update(cx, |project, cx| {
3154 project.register_buffer_with_language_servers(&buffer, cx)
3155 });
3156
3157 let mut fake_server = fake_servers.next().await.unwrap();
3158 let open_notification = fake_server
3159 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3160 .await;
3161
3162 // Edit the buffer, moving the content down
3163 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3164 let change_notification_1 = fake_server
3165 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3166 .await;
3167 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3168
3169 // Report some diagnostics for the initial version of the buffer
3170 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3171 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3172 version: Some(open_notification.text_document.version),
3173 diagnostics: vec![
3174 lsp::Diagnostic {
3175 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3176 severity: Some(DiagnosticSeverity::ERROR),
3177 message: "undefined variable 'A'".to_string(),
3178 source: Some("disk".to_string()),
3179 ..Default::default()
3180 },
3181 lsp::Diagnostic {
3182 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3183 severity: Some(DiagnosticSeverity::ERROR),
3184 message: "undefined variable 'BB'".to_string(),
3185 source: Some("disk".to_string()),
3186 ..Default::default()
3187 },
3188 lsp::Diagnostic {
3189 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3190 severity: Some(DiagnosticSeverity::ERROR),
3191 source: Some("disk".to_string()),
3192 message: "undefined variable 'CCC'".to_string(),
3193 ..Default::default()
3194 },
3195 ],
3196 });
3197
3198 // The diagnostics have moved down since they were created.
3199 cx.executor().run_until_parked();
3200 buffer.update(cx, |buffer, _| {
3201 assert_eq!(
3202 buffer
3203 .snapshot()
3204 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3205 .collect::<Vec<_>>(),
3206 &[
3207 DiagnosticEntry {
3208 range: Point::new(3, 9)..Point::new(3, 11),
3209 diagnostic: Diagnostic {
3210 source: Some("disk".into()),
3211 severity: DiagnosticSeverity::ERROR,
3212 message: "undefined variable 'BB'".to_string(),
3213 is_disk_based: true,
3214 group_id: 1,
3215 is_primary: true,
3216 source_kind: DiagnosticSourceKind::Pushed,
3217 ..Diagnostic::default()
3218 },
3219 },
3220 DiagnosticEntry {
3221 range: Point::new(4, 9)..Point::new(4, 12),
3222 diagnostic: Diagnostic {
3223 source: Some("disk".into()),
3224 severity: DiagnosticSeverity::ERROR,
3225 message: "undefined variable 'CCC'".to_string(),
3226 is_disk_based: true,
3227 group_id: 2,
3228 is_primary: true,
3229 source_kind: DiagnosticSourceKind::Pushed,
3230 ..Diagnostic::default()
3231 }
3232 }
3233 ]
3234 );
3235 assert_eq!(
3236 chunks_with_diagnostics(buffer, 0..buffer.len()),
3237 [
3238 ("\n\nfn a() { ".to_string(), None),
3239 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3240 (" }\nfn b() { ".to_string(), None),
3241 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3242 (" }\nfn c() { ".to_string(), None),
3243 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3244 (" }\n".to_string(), None),
3245 ]
3246 );
3247 assert_eq!(
3248 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3249 [
3250 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3251 (" }\nfn c() { ".to_string(), None),
3252 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3253 ]
3254 );
3255 });
3256
3257 // Ensure overlapping diagnostics are highlighted correctly.
3258 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3259 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3260 version: Some(open_notification.text_document.version),
3261 diagnostics: vec![
3262 lsp::Diagnostic {
3263 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3264 severity: Some(DiagnosticSeverity::ERROR),
3265 message: "undefined variable 'A'".to_string(),
3266 source: Some("disk".to_string()),
3267 ..Default::default()
3268 },
3269 lsp::Diagnostic {
3270 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3271 severity: Some(DiagnosticSeverity::WARNING),
3272 message: "unreachable statement".to_string(),
3273 source: Some("disk".to_string()),
3274 ..Default::default()
3275 },
3276 ],
3277 });
3278
3279 cx.executor().run_until_parked();
3280 buffer.update(cx, |buffer, _| {
3281 assert_eq!(
3282 buffer
3283 .snapshot()
3284 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3285 .collect::<Vec<_>>(),
3286 &[
3287 DiagnosticEntry {
3288 range: Point::new(2, 9)..Point::new(2, 12),
3289 diagnostic: Diagnostic {
3290 source: Some("disk".into()),
3291 severity: DiagnosticSeverity::WARNING,
3292 message: "unreachable statement".to_string(),
3293 is_disk_based: true,
3294 group_id: 4,
3295 is_primary: true,
3296 source_kind: DiagnosticSourceKind::Pushed,
3297 ..Diagnostic::default()
3298 }
3299 },
3300 DiagnosticEntry {
3301 range: Point::new(2, 9)..Point::new(2, 10),
3302 diagnostic: Diagnostic {
3303 source: Some("disk".into()),
3304 severity: DiagnosticSeverity::ERROR,
3305 message: "undefined variable 'A'".to_string(),
3306 is_disk_based: true,
3307 group_id: 3,
3308 is_primary: true,
3309 source_kind: DiagnosticSourceKind::Pushed,
3310 ..Diagnostic::default()
3311 },
3312 }
3313 ]
3314 );
3315 assert_eq!(
3316 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3317 [
3318 ("fn a() { ".to_string(), None),
3319 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3320 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3321 ("\n".to_string(), None),
3322 ]
3323 );
3324 assert_eq!(
3325 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3326 [
3327 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3328 ("\n".to_string(), None),
3329 ]
3330 );
3331 });
3332
3333 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3334 // changes since the last save.
3335 buffer.update(cx, |buffer, cx| {
3336 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3337 buffer.edit(
3338 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3339 None,
3340 cx,
3341 );
3342 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3343 });
3344 let change_notification_2 = fake_server
3345 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3346 .await;
3347 assert!(
3348 change_notification_2.text_document.version > change_notification_1.text_document.version
3349 );
3350
3351 // Handle out-of-order diagnostics
3352 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3353 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3354 version: Some(change_notification_2.text_document.version),
3355 diagnostics: vec![
3356 lsp::Diagnostic {
3357 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3358 severity: Some(DiagnosticSeverity::ERROR),
3359 message: "undefined variable 'BB'".to_string(),
3360 source: Some("disk".to_string()),
3361 ..Default::default()
3362 },
3363 lsp::Diagnostic {
3364 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3365 severity: Some(DiagnosticSeverity::WARNING),
3366 message: "undefined variable 'A'".to_string(),
3367 source: Some("disk".to_string()),
3368 ..Default::default()
3369 },
3370 ],
3371 });
3372
3373 cx.executor().run_until_parked();
3374 buffer.update(cx, |buffer, _| {
3375 assert_eq!(
3376 buffer
3377 .snapshot()
3378 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3379 .collect::<Vec<_>>(),
3380 &[
3381 DiagnosticEntry {
3382 range: Point::new(2, 21)..Point::new(2, 22),
3383 diagnostic: Diagnostic {
3384 source: Some("disk".into()),
3385 severity: DiagnosticSeverity::WARNING,
3386 message: "undefined variable 'A'".to_string(),
3387 is_disk_based: true,
3388 group_id: 6,
3389 is_primary: true,
3390 source_kind: DiagnosticSourceKind::Pushed,
3391 ..Diagnostic::default()
3392 }
3393 },
3394 DiagnosticEntry {
3395 range: Point::new(3, 9)..Point::new(3, 14),
3396 diagnostic: Diagnostic {
3397 source: Some("disk".into()),
3398 severity: DiagnosticSeverity::ERROR,
3399 message: "undefined variable 'BB'".to_string(),
3400 is_disk_based: true,
3401 group_id: 5,
3402 is_primary: true,
3403 source_kind: DiagnosticSourceKind::Pushed,
3404 ..Diagnostic::default()
3405 },
3406 }
3407 ]
3408 );
3409 });
3410}
3411
3412#[gpui::test]
3413async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
3414 init_test(cx);
3415
3416 let text = concat!(
3417 "let one = ;\n", //
3418 "let two = \n",
3419 "let three = 3;\n",
3420 );
3421
3422 let fs = FakeFs::new(cx.executor());
3423 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3424
3425 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3426 let buffer = project
3427 .update(cx, |project, cx| {
3428 project.open_local_buffer(path!("/dir/a.rs"), cx)
3429 })
3430 .await
3431 .unwrap();
3432
3433 project.update(cx, |project, cx| {
3434 project.lsp_store().update(cx, |lsp_store, cx| {
3435 lsp_store
3436 .update_diagnostic_entries(
3437 LanguageServerId(0),
3438 PathBuf::from(path!("/dir/a.rs")),
3439 None,
3440 None,
3441 vec![
3442 DiagnosticEntry {
3443 range: Unclipped(PointUtf16::new(0, 10))
3444 ..Unclipped(PointUtf16::new(0, 10)),
3445 diagnostic: Diagnostic {
3446 severity: DiagnosticSeverity::ERROR,
3447 message: "syntax error 1".to_string(),
3448 source_kind: DiagnosticSourceKind::Pushed,
3449 ..Diagnostic::default()
3450 },
3451 },
3452 DiagnosticEntry {
3453 range: Unclipped(PointUtf16::new(1, 10))
3454 ..Unclipped(PointUtf16::new(1, 10)),
3455 diagnostic: Diagnostic {
3456 severity: DiagnosticSeverity::ERROR,
3457 message: "syntax error 2".to_string(),
3458 source_kind: DiagnosticSourceKind::Pushed,
3459 ..Diagnostic::default()
3460 },
3461 },
3462 ],
3463 cx,
3464 )
3465 .unwrap();
3466 })
3467 });
3468
3469 // An empty range is extended forward to include the following character.
3470 // At the end of a line, an empty range is extended backward to include
3471 // the preceding character.
3472 buffer.update(cx, |buffer, _| {
3473 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
3474 assert_eq!(
3475 chunks
3476 .iter()
3477 .map(|(s, d)| (s.as_str(), *d))
3478 .collect::<Vec<_>>(),
3479 &[
3480 ("let one = ", None),
3481 (";", Some(DiagnosticSeverity::ERROR)),
3482 ("\nlet two =", None),
3483 (" ", Some(DiagnosticSeverity::ERROR)),
3484 ("\nlet three = 3;\n", None)
3485 ]
3486 );
3487 });
3488}
3489
3490#[gpui::test]
3491async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3492 init_test(cx);
3493
3494 let fs = FakeFs::new(cx.executor());
3495 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3496 .await;
3497
3498 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3499 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3500
3501 lsp_store.update(cx, |lsp_store, cx| {
3502 lsp_store
3503 .update_diagnostic_entries(
3504 LanguageServerId(0),
3505 Path::new(path!("/dir/a.rs")).to_owned(),
3506 None,
3507 None,
3508 vec![DiagnosticEntry {
3509 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3510 diagnostic: Diagnostic {
3511 severity: DiagnosticSeverity::ERROR,
3512 is_primary: true,
3513 message: "syntax error a1".to_string(),
3514 source_kind: DiagnosticSourceKind::Pushed,
3515 ..Diagnostic::default()
3516 },
3517 }],
3518 cx,
3519 )
3520 .unwrap();
3521 lsp_store
3522 .update_diagnostic_entries(
3523 LanguageServerId(1),
3524 Path::new(path!("/dir/a.rs")).to_owned(),
3525 None,
3526 None,
3527 vec![DiagnosticEntry {
3528 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3529 diagnostic: Diagnostic {
3530 severity: DiagnosticSeverity::ERROR,
3531 is_primary: true,
3532 message: "syntax error b1".to_string(),
3533 source_kind: DiagnosticSourceKind::Pushed,
3534 ..Diagnostic::default()
3535 },
3536 }],
3537 cx,
3538 )
3539 .unwrap();
3540
3541 assert_eq!(
3542 lsp_store.diagnostic_summary(false, cx),
3543 DiagnosticSummary {
3544 error_count: 2,
3545 warning_count: 0,
3546 }
3547 );
3548 });
3549}
3550
// Verifies that LSP edits computed against an *older* document version are
// transformed through the buffer edits made since that version, so they land
// in the right places in the current text.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server observed at open time; the
    // server's edits below will be expressed against this stale version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Interpret the server's edits relative to `lsp_document_version`; the
    // returned ranges should already be mapped onto the current buffer.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the comments inserted after
    // the version was captured, while still landing the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3705
// Verifies that a sprawling whole-file diff from a language server is
// minimized down to the small set of actual changes before being applied.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four overlapping LSP edits above boil down to exactly two
        // minimal buffer edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3816
3817#[gpui::test]
3818async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
3819 cx: &mut gpui::TestAppContext,
3820) {
3821 init_test(cx);
3822
3823 let text = "Path()";
3824
3825 let fs = FakeFs::new(cx.executor());
3826 fs.insert_tree(
3827 path!("/dir"),
3828 json!({
3829 "a.rs": text
3830 }),
3831 )
3832 .await;
3833
3834 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3835 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3836 let buffer = project
3837 .update(cx, |project, cx| {
3838 project.open_local_buffer(path!("/dir/a.rs"), cx)
3839 })
3840 .await
3841 .unwrap();
3842
3843 // Simulate the language server sending us a pair of edits at the same location,
3844 // with an insertion following a replacement (which violates the LSP spec).
3845 let edits = lsp_store
3846 .update(cx, |lsp_store, cx| {
3847 lsp_store.as_local_mut().unwrap().edits_from_lsp(
3848 &buffer,
3849 [
3850 lsp::TextEdit {
3851 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
3852 new_text: "Path".into(),
3853 },
3854 lsp::TextEdit {
3855 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3856 new_text: "from path import Path\n\n\n".into(),
3857 },
3858 ],
3859 LanguageServerId(0),
3860 None,
3861 cx,
3862 )
3863 })
3864 .await
3865 .unwrap();
3866
3867 buffer.update(cx, |buffer, cx| {
3868 buffer.edit(edits, None, cx);
3869 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
3870 });
3871}
3872
// Verifies that malformed LSP edits — inverted ranges, out-of-bounds
// positions, and unordered entries — are normalized into valid, minimal
// buffer edits instead of being rejected or applied incorrectly.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end position precedes start position.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End row 99 is past the end of the file and must be clipped.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // After normalization, the same two minimal edits emerge as in the
        // well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3979
3980fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3981 buffer: &Buffer,
3982 range: Range<T>,
3983) -> Vec<(String, Option<DiagnosticSeverity>)> {
3984 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3985 for chunk in buffer.snapshot().chunks(range, true) {
3986 if chunks
3987 .last()
3988 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3989 {
3990 chunks.last_mut().unwrap().0.push_str(chunk.text);
3991 } else {
3992 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3993 }
3994 }
3995 chunks
3996}
3997
// Verifies go-to-definition into a file outside the project: the target is
// loaded via an invisible worktree that is released when the last reference
// to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` lives outside its worktrees.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server resolves the definition to a location in `a.rs`.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // Resolving the definition added an *invisible* worktree for `a.rs`
        // alongside the visible one for `b.rs`.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Returns each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4098
// Verifies that when a completion item carries an explicit `text_edit`, that
// edit's text and range win over both `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the request before installing the handler; the handler then
    // services it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with one item whose `text_edit` replaces the trailing "fqn".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resulting completion uses the `text_edit` text and range, not
    // `insert_text` or `label`.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4182
// Verifies completion resolution when the response supplies a default
// `edit_range` via `itemDefaults` instead of a per-item `text_edit`:
// the new text falls back to `text_edit_text`, then to `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` is combined with the default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit range present, the label (not insert_text) is
        // used as the replacement text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4320
// Verifies completion fallback when neither a `text_edit` nor a default
// `edit_range` is provided: the replacement range is inferred from the word
// around the cursor, and the text falls back to `insert_text`, then `label`.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // `insert_text` wins over `label`; the range covers the "fqn" word.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Request completions just inside the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // With nothing else available, `label` is the replacement text and the
    // range covers the "cmp" word before the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4427
/// Regression test: carriage returns in an LSP completion's `insert_text`
/// must be normalized to `\n` before the completion is applied to the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A single, empty TypeScript file in a fake worktree.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript language server that advertises completion support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Request completions at the end of the buffer, then answer the request
    // with an item whose insert text contains both a lone `\r` and a `\r\n`.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both `\r` and `\r\n` were normalized to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4496
/// End-to-end test of applying a code action that carries a command instead of
/// edits: resolving the action yields a command, executing the command makes
/// the server send `workspace/applyEdit`, and the resulting edits are captured
/// in the project transaction returned by `apply_code_action`.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Fake TypeScript server advertising resolvable code actions and a single
    // executable command.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Ask the client to prepend "X" to `a.ts` via `workspace/applyEdit`.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
4639
4640#[gpui::test]
4641async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
4642 init_test(cx);
4643 let fs = FakeFs::new(cx.background_executor.clone());
4644 let expected_contents = "content";
4645 fs.as_fake()
4646 .insert_tree(
4647 "/root",
4648 json!({
4649 "test.txt": expected_contents
4650 }),
4651 )
4652 .await;
4653
4654 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
4655
4656 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
4657 let worktree = project.worktrees(cx).next().unwrap();
4658 let entry_id = worktree
4659 .read(cx)
4660 .entry_for_path(rel_path("test.txt"))
4661 .unwrap()
4662 .id;
4663 (worktree, entry_id)
4664 });
4665 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4666 let _result = project
4667 .update(cx, |project, cx| {
4668 project.rename_entry(
4669 entry_id,
4670 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
4671 cx,
4672 )
4673 })
4674 .await
4675 .unwrap();
4676 worktree.read_with(cx, |worktree, _| {
4677 assert!(
4678 worktree.entry_for_path(rel_path("test.txt")).is_none(),
4679 "Old file should have been removed"
4680 );
4681 assert!(
4682 worktree
4683 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4684 .is_some(),
4685 "Whole directory hierarchy and the new file should have been created"
4686 );
4687 });
4688 assert_eq!(
4689 worktree
4690 .update(cx, |worktree, cx| {
4691 worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
4692 })
4693 .await
4694 .unwrap()
4695 .text,
4696 expected_contents,
4697 "Moved file's contents should be preserved"
4698 );
4699
4700 let entry_id = worktree.read_with(cx, |worktree, _| {
4701 worktree
4702 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4703 .unwrap()
4704 .id
4705 });
4706
4707 let _result = project
4708 .update(cx, |project, cx| {
4709 project.rename_entry(
4710 entry_id,
4711 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
4712 cx,
4713 )
4714 })
4715 .await
4716 .unwrap();
4717 worktree.read_with(cx, |worktree, _| {
4718 assert!(
4719 worktree.entry_for_path(rel_path("test.txt")).is_none(),
4720 "First file should not reappear"
4721 );
4722 assert!(
4723 worktree
4724 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4725 .is_none(),
4726 "Old file should have been removed"
4727 );
4728 assert!(
4729 worktree
4730 .entry_for_path(rel_path("dir1/dir2/test.txt"))
4731 .is_some(),
4732 "No error should have occurred after moving into existing directory"
4733 );
4734 });
4735 assert_eq!(
4736 worktree
4737 .update(cx, |worktree, cx| {
4738 worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
4739 })
4740 .await
4741 .unwrap()
4742 .text,
4743 expected_contents,
4744 "Moved file's contents should be preserved"
4745 );
4746}
4747
4748#[gpui::test(iterations = 10)]
4749async fn test_save_file(cx: &mut gpui::TestAppContext) {
4750 init_test(cx);
4751
4752 let fs = FakeFs::new(cx.executor());
4753 fs.insert_tree(
4754 path!("/dir"),
4755 json!({
4756 "file1": "the old contents",
4757 }),
4758 )
4759 .await;
4760
4761 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4762 let buffer = project
4763 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4764 .await
4765 .unwrap();
4766 buffer.update(cx, |buffer, cx| {
4767 assert_eq!(buffer.text(), "the old contents");
4768 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4769 });
4770
4771 project
4772 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4773 .await
4774 .unwrap();
4775
4776 let new_text = fs
4777 .load(Path::new(path!("/dir/file1")))
4778 .await
4779 .unwrap()
4780 .replace("\r\n", "\n");
4781 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4782}
4783
/// Regression test for #24349: saving an untitled buffer under a path with a
/// recognized extension must start the matching language server and notify it
/// about the newly-saved document.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    // Empty worktree — no files exist yet.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register a fake Rust language server; it should only start once a Rust
    // file actually exists in the project.
    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer has no language yet, so no server should be running.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving the buffer as `file.rs` assigns it the Rust language.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now served by the freshly-started language server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4863
4864#[gpui::test(iterations = 30)]
4865async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4866 init_test(cx);
4867
4868 let fs = FakeFs::new(cx.executor());
4869 fs.insert_tree(
4870 path!("/dir"),
4871 json!({
4872 "file1": "the original contents",
4873 }),
4874 )
4875 .await;
4876
4877 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4878 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4879 let buffer = project
4880 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4881 .await
4882 .unwrap();
4883
4884 // Change the buffer's file on disk, and then wait for the file change
4885 // to be detected by the worktree, so that the buffer starts reloading.
4886 fs.save(
4887 path!("/dir/file1").as_ref(),
4888 &"the first contents".into(),
4889 Default::default(),
4890 )
4891 .await
4892 .unwrap();
4893 worktree.next_event(cx).await;
4894
4895 // Change the buffer's file again. Depending on the random seed, the
4896 // previous file change may still be in progress.
4897 fs.save(
4898 path!("/dir/file1").as_ref(),
4899 &"the second contents".into(),
4900 Default::default(),
4901 )
4902 .await
4903 .unwrap();
4904 worktree.next_event(cx).await;
4905
4906 cx.executor().run_until_parked();
4907 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4908 buffer.read_with(cx, |buffer, _| {
4909 assert_eq!(buffer.text(), on_disk_text);
4910 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4911 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4912 });
4913}
4914
4915#[gpui::test(iterations = 30)]
4916async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4917 init_test(cx);
4918
4919 let fs = FakeFs::new(cx.executor());
4920 fs.insert_tree(
4921 path!("/dir"),
4922 json!({
4923 "file1": "the original contents",
4924 }),
4925 )
4926 .await;
4927
4928 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4929 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4930 let buffer = project
4931 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4932 .await
4933 .unwrap();
4934
4935 // Change the buffer's file on disk, and then wait for the file change
4936 // to be detected by the worktree, so that the buffer starts reloading.
4937 fs.save(
4938 path!("/dir/file1").as_ref(),
4939 &"the first contents".into(),
4940 Default::default(),
4941 )
4942 .await
4943 .unwrap();
4944 worktree.next_event(cx).await;
4945
4946 cx.executor()
4947 .spawn(cx.executor().simulate_random_delay())
4948 .await;
4949
4950 // Perform a noop edit, causing the buffer's version to increase.
4951 buffer.update(cx, |buffer, cx| {
4952 buffer.edit([(0..0, " ")], None, cx);
4953 buffer.undo(cx);
4954 });
4955
4956 cx.executor().run_until_parked();
4957 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4958 buffer.read_with(cx, |buffer, _| {
4959 let buffer_text = buffer.text();
4960 if buffer_text == on_disk_text {
4961 assert!(
4962 !buffer.is_dirty() && !buffer.has_conflict(),
4963 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4964 );
4965 }
4966 // If the file change occurred while the buffer was processing the first
4967 // change, the buffer will be in a conflicting state.
4968 else {
4969 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4970 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4971 }
4972 });
4973}
4974
4975#[gpui::test]
4976async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4977 init_test(cx);
4978
4979 let fs = FakeFs::new(cx.executor());
4980 fs.insert_tree(
4981 path!("/dir"),
4982 json!({
4983 "file1": "the old contents",
4984 }),
4985 )
4986 .await;
4987
4988 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4989 let buffer = project
4990 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4991 .await
4992 .unwrap();
4993 buffer.update(cx, |buffer, cx| {
4994 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4995 });
4996
4997 project
4998 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4999 .await
5000 .unwrap();
5001
5002 let new_text = fs
5003 .load(Path::new(path!("/dir/file1")))
5004 .await
5005 .unwrap()
5006 .replace("\r\n", "\n");
5007 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5008}
5009
5010#[gpui::test]
5011async fn test_save_as(cx: &mut gpui::TestAppContext) {
5012 init_test(cx);
5013
5014 let fs = FakeFs::new(cx.executor());
5015 fs.insert_tree("/dir", json!({})).await;
5016
5017 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5018
5019 let languages = project.update(cx, |project, _| project.languages().clone());
5020 languages.add(rust_lang());
5021
5022 let buffer = project.update(cx, |project, cx| {
5023 project.create_local_buffer("", None, false, cx)
5024 });
5025 buffer.update(cx, |buffer, cx| {
5026 buffer.edit([(0..0, "abc")], None, cx);
5027 assert!(buffer.is_dirty());
5028 assert!(!buffer.has_conflict());
5029 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
5030 });
5031 project
5032 .update(cx, |project, cx| {
5033 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5034 let path = ProjectPath {
5035 worktree_id,
5036 path: rel_path("file1.rs").into(),
5037 };
5038 project.save_buffer_as(buffer.clone(), path, cx)
5039 })
5040 .await
5041 .unwrap();
5042 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
5043
5044 cx.executor().run_until_parked();
5045 buffer.update(cx, |buffer, cx| {
5046 assert_eq!(
5047 buffer.file().unwrap().full_path(cx),
5048 Path::new("dir/file1.rs")
5049 );
5050 assert!(!buffer.is_dirty());
5051 assert!(!buffer.has_conflict());
5052 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
5053 });
5054
5055 let opened_buffer = project
5056 .update(cx, |project, cx| {
5057 project.open_local_buffer("/dir/file1.rs", cx)
5058 })
5059 .await
5060 .unwrap();
5061 assert_eq!(opened_buffer, buffer);
5062}
5063
5064#[gpui::test]
5065async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
5066 init_test(cx);
5067
5068 let fs = FakeFs::new(cx.executor());
5069 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5070
5071 fs.insert_tree(
5072 path!("/dir"),
5073 json!({
5074 "data_a.txt": "data about a"
5075 }),
5076 )
5077 .await;
5078
5079 let buffer = project
5080 .update(cx, |project, cx| {
5081 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5082 })
5083 .await
5084 .unwrap();
5085
5086 buffer.update(cx, |buffer, cx| {
5087 buffer.edit([(11..12, "b")], None, cx);
5088 });
5089
5090 // Save buffer's contents as a new file and confirm that the buffer's now
5091 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
5092 // file associated with the buffer has now been updated to `data_b.txt`
5093 project
5094 .update(cx, |project, cx| {
5095 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5096 let new_path = ProjectPath {
5097 worktree_id,
5098 path: rel_path("data_b.txt").into(),
5099 };
5100
5101 project.save_buffer_as(buffer.clone(), new_path, cx)
5102 })
5103 .await
5104 .unwrap();
5105
5106 buffer.update(cx, |buffer, cx| {
5107 assert_eq!(
5108 buffer.file().unwrap().full_path(cx),
5109 Path::new("dir/data_b.txt")
5110 )
5111 });
5112
5113 // Open the original `data_a.txt` file, confirming that its contents are
5114 // unchanged and the resulting buffer's associated file is `data_a.txt`.
5115 let original_buffer = project
5116 .update(cx, |project, cx| {
5117 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5118 })
5119 .await
5120 .unwrap();
5121
5122 original_buffer.update(cx, |buffer, cx| {
5123 assert_eq!(buffer.text(), "data about a");
5124 assert_eq!(
5125 buffer.file().unwrap().full_path(cx),
5126 Path::new("dir/data_a.txt")
5127 )
5128 });
5129}
5130
/// Renames and deletions on the real filesystem must preserve entry ids and
/// re-point open buffers at their new paths; the resulting worktree updates,
/// when replayed on a remote replica, must make it consistent with the local
/// worktree. Uses `RealFs` + `TempTree`, hence `allow_parking` and retries.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a worktree-relative path.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the stable entry id for a worktree-relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree broadcasts, to replay below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including renames of ancestor directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but reports `DiskState::Deleted`.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5298
5299#[gpui::test(iterations = 10)]
5300async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
5301 init_test(cx);
5302
5303 let fs = FakeFs::new(cx.executor());
5304 fs.insert_tree(
5305 path!("/dir"),
5306 json!({
5307 "a": {
5308 "file1": "",
5309 }
5310 }),
5311 )
5312 .await;
5313
5314 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
5315 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5316 let tree_id = tree.update(cx, |tree, _| tree.id());
5317
5318 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5319 project.update(cx, |project, cx| {
5320 let tree = project.worktrees(cx).next().unwrap();
5321 tree.read(cx)
5322 .entry_for_path(rel_path(path))
5323 .unwrap_or_else(|| panic!("no entry for path {}", path))
5324 .id
5325 })
5326 };
5327
5328 let dir_id = id_for_path("a", cx);
5329 let file_id = id_for_path("a/file1", cx);
5330 let buffer = project
5331 .update(cx, |p, cx| {
5332 p.open_buffer((tree_id, rel_path("a/file1")), cx)
5333 })
5334 .await
5335 .unwrap();
5336 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5337
5338 project
5339 .update(cx, |project, cx| {
5340 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
5341 })
5342 .unwrap()
5343 .await
5344 .into_included()
5345 .unwrap();
5346 cx.executor().run_until_parked();
5347
5348 assert_eq!(id_for_path("b", cx), dir_id);
5349 assert_eq!(id_for_path("b/file1", cx), file_id);
5350 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5351}
5352
5353#[gpui::test]
5354async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5355 init_test(cx);
5356
5357 let fs = FakeFs::new(cx.executor());
5358 fs.insert_tree(
5359 "/dir",
5360 json!({
5361 "a.txt": "a-contents",
5362 "b.txt": "b-contents",
5363 }),
5364 )
5365 .await;
5366
5367 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5368
5369 // Spawn multiple tasks to open paths, repeating some paths.
5370 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5371 (
5372 p.open_local_buffer("/dir/a.txt", cx),
5373 p.open_local_buffer("/dir/b.txt", cx),
5374 p.open_local_buffer("/dir/a.txt", cx),
5375 )
5376 });
5377
5378 let buffer_a_1 = buffer_a_1.await.unwrap();
5379 let buffer_a_2 = buffer_a_2.await.unwrap();
5380 let buffer_b = buffer_b.await.unwrap();
5381 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5382 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5383
5384 // There is only one buffer per path.
5385 let buffer_a_id = buffer_a_1.entity_id();
5386 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5387
5388 // Open the same path again while it is still open.
5389 drop(buffer_a_1);
5390 let buffer_a_3 = project
5391 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5392 .await
5393 .unwrap();
5394
5395 // There's still only one buffer per path.
5396 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5397}
5398
5399#[gpui::test]
5400async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5401 init_test(cx);
5402
5403 let fs = FakeFs::new(cx.executor());
5404 fs.insert_tree(
5405 path!("/dir"),
5406 json!({
5407 "file1": "abc",
5408 "file2": "def",
5409 "file3": "ghi",
5410 }),
5411 )
5412 .await;
5413
5414 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5415
5416 let buffer1 = project
5417 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5418 .await
5419 .unwrap();
5420 let events = Arc::new(Mutex::new(Vec::new()));
5421
5422 // initially, the buffer isn't dirty.
5423 buffer1.update(cx, |buffer, cx| {
5424 cx.subscribe(&buffer1, {
5425 let events = events.clone();
5426 move |_, _, event, _| match event {
5427 BufferEvent::Operation { .. } => {}
5428 _ => events.lock().push(event.clone()),
5429 }
5430 })
5431 .detach();
5432
5433 assert!(!buffer.is_dirty());
5434 assert!(events.lock().is_empty());
5435
5436 buffer.edit([(1..2, "")], None, cx);
5437 });
5438
5439 // after the first edit, the buffer is dirty, and emits a dirtied event.
5440 buffer1.update(cx, |buffer, cx| {
5441 assert!(buffer.text() == "ac");
5442 assert!(buffer.is_dirty());
5443 assert_eq!(
5444 *events.lock(),
5445 &[
5446 language::BufferEvent::Edited,
5447 language::BufferEvent::DirtyChanged
5448 ]
5449 );
5450 events.lock().clear();
5451 buffer.did_save(
5452 buffer.version(),
5453 buffer.file().unwrap().disk_state().mtime(),
5454 cx,
5455 );
5456 });
5457
5458 // after saving, the buffer is not dirty, and emits a saved event.
5459 buffer1.update(cx, |buffer, cx| {
5460 assert!(!buffer.is_dirty());
5461 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
5462 events.lock().clear();
5463
5464 buffer.edit([(1..1, "B")], None, cx);
5465 buffer.edit([(2..2, "D")], None, cx);
5466 });
5467
5468 // after editing again, the buffer is dirty, and emits another dirty event.
5469 buffer1.update(cx, |buffer, cx| {
5470 assert!(buffer.text() == "aBDc");
5471 assert!(buffer.is_dirty());
5472 assert_eq!(
5473 *events.lock(),
5474 &[
5475 language::BufferEvent::Edited,
5476 language::BufferEvent::DirtyChanged,
5477 language::BufferEvent::Edited,
5478 ],
5479 );
5480 events.lock().clear();
5481
5482 // After restoring the buffer to its previously-saved state,
5483 // the buffer is not considered dirty anymore.
5484 buffer.edit([(1..3, "")], None, cx);
5485 assert!(buffer.text() == "ac");
5486 assert!(!buffer.is_dirty());
5487 });
5488
5489 assert_eq!(
5490 *events.lock(),
5491 &[
5492 language::BufferEvent::Edited,
5493 language::BufferEvent::DirtyChanged
5494 ]
5495 );
5496
5497 // When a file is deleted, it is not considered dirty.
5498 let events = Arc::new(Mutex::new(Vec::new()));
5499 let buffer2 = project
5500 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5501 .await
5502 .unwrap();
5503 buffer2.update(cx, |_, cx| {
5504 cx.subscribe(&buffer2, {
5505 let events = events.clone();
5506 move |_, _, event, _| match event {
5507 BufferEvent::Operation { .. } => {}
5508 _ => events.lock().push(event.clone()),
5509 }
5510 })
5511 .detach();
5512 });
5513
5514 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
5515 .await
5516 .unwrap();
5517 cx.executor().run_until_parked();
5518 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5519 assert_eq!(
5520 mem::take(&mut *events.lock()),
5521 &[language::BufferEvent::FileHandleChanged]
5522 );
5523
5524 // Buffer becomes dirty when edited.
5525 buffer2.update(cx, |buffer, cx| {
5526 buffer.edit([(2..3, "")], None, cx);
5527 assert_eq!(buffer.is_dirty(), true);
5528 });
5529 assert_eq!(
5530 mem::take(&mut *events.lock()),
5531 &[
5532 language::BufferEvent::Edited,
5533 language::BufferEvent::DirtyChanged
5534 ]
5535 );
5536
5537 // Buffer becomes clean again when all of its content is removed, because
5538 // the file was deleted.
5539 buffer2.update(cx, |buffer, cx| {
5540 buffer.edit([(0..2, "")], None, cx);
5541 assert_eq!(buffer.is_empty(), true);
5542 assert_eq!(buffer.is_dirty(), false);
5543 });
5544 assert_eq!(
5545 *events.lock(),
5546 &[
5547 language::BufferEvent::Edited,
5548 language::BufferEvent::DirtyChanged
5549 ]
5550 );
5551
5552 // When a file is already dirty when deleted, we don't emit a Dirtied event.
5553 let events = Arc::new(Mutex::new(Vec::new()));
5554 let buffer3 = project
5555 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
5556 .await
5557 .unwrap();
5558 buffer3.update(cx, |_, cx| {
5559 cx.subscribe(&buffer3, {
5560 let events = events.clone();
5561 move |_, _, event, _| match event {
5562 BufferEvent::Operation { .. } => {}
5563 _ => events.lock().push(event.clone()),
5564 }
5565 })
5566 .detach();
5567 });
5568
5569 buffer3.update(cx, |buffer, cx| {
5570 buffer.edit([(0..0, "x")], None, cx);
5571 });
5572 events.lock().clear();
5573 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
5574 .await
5575 .unwrap();
5576 cx.executor().run_until_parked();
5577 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
5578 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
5579}
5580
5581#[gpui::test]
5582async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
5583 init_test(cx);
5584
5585 let (initial_contents, initial_offsets) =
5586 marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
5587 let fs = FakeFs::new(cx.executor());
5588 fs.insert_tree(
5589 path!("/dir"),
5590 json!({
5591 "the-file": initial_contents,
5592 }),
5593 )
5594 .await;
5595 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5596 let buffer = project
5597 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
5598 .await
5599 .unwrap();
5600
5601 let anchors = initial_offsets
5602 .iter()
5603 .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
5604 .collect::<Vec<_>>();
5605
5606 // Change the file on disk, adding two new lines of text, and removing
5607 // one line.
5608 buffer.update(cx, |buffer, _| {
5609 assert!(!buffer.is_dirty());
5610 assert!(!buffer.has_conflict());
5611 });
5612
5613 let (new_contents, new_offsets) =
5614 marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
5615 fs.save(
5616 path!("/dir/the-file").as_ref(),
5617 &new_contents.as_str().into(),
5618 LineEnding::Unix,
5619 )
5620 .await
5621 .unwrap();
5622
5623 // Because the buffer was not modified, it is reloaded from disk. Its
5624 // contents are edited according to the diff between the old and new
5625 // file contents.
5626 cx.executor().run_until_parked();
5627 buffer.update(cx, |buffer, _| {
5628 assert_eq!(buffer.text(), new_contents);
5629 assert!(!buffer.is_dirty());
5630 assert!(!buffer.has_conflict());
5631
5632 let anchor_offsets = anchors
5633 .iter()
5634 .map(|anchor| anchor.to_offset(&*buffer))
5635 .collect::<Vec<_>>();
5636 assert_eq!(anchor_offsets, new_offsets);
5637 });
5638
5639 // Modify the buffer
5640 buffer.update(cx, |buffer, cx| {
5641 buffer.edit([(0..0, " ")], None, cx);
5642 assert!(buffer.is_dirty());
5643 assert!(!buffer.has_conflict());
5644 });
5645
5646 // Change the file on disk again, adding blank lines to the beginning.
5647 fs.save(
5648 path!("/dir/the-file").as_ref(),
5649 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
5650 LineEnding::Unix,
5651 )
5652 .await
5653 .unwrap();
5654
5655 // Because the buffer is modified, it doesn't reload from disk, but is
5656 // marked as having a conflict.
5657 cx.executor().run_until_parked();
5658 buffer.update(cx, |buffer, _| {
5659 assert_eq!(buffer.text(), " ".to_string() + &new_contents);
5660 assert!(buffer.has_conflict());
5661 });
5662}
5663
5664#[gpui::test]
5665async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
5666 init_test(cx);
5667
5668 let fs = FakeFs::new(cx.executor());
5669 fs.insert_tree(
5670 path!("/dir"),
5671 json!({
5672 "file1": "a\nb\nc\n",
5673 "file2": "one\r\ntwo\r\nthree\r\n",
5674 }),
5675 )
5676 .await;
5677
5678 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5679 let buffer1 = project
5680 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5681 .await
5682 .unwrap();
5683 let buffer2 = project
5684 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5685 .await
5686 .unwrap();
5687
5688 buffer1.update(cx, |buffer, _| {
5689 assert_eq!(buffer.text(), "a\nb\nc\n");
5690 assert_eq!(buffer.line_ending(), LineEnding::Unix);
5691 });
5692 buffer2.update(cx, |buffer, _| {
5693 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
5694 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5695 });
5696
5697 // Change a file's line endings on disk from unix to windows. The buffer's
5698 // state updates correctly.
5699 fs.save(
5700 path!("/dir/file1").as_ref(),
5701 &"aaa\nb\nc\n".into(),
5702 LineEnding::Windows,
5703 )
5704 .await
5705 .unwrap();
5706 cx.executor().run_until_parked();
5707 buffer1.update(cx, |buffer, _| {
5708 assert_eq!(buffer.text(), "aaa\nb\nc\n");
5709 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5710 });
5711
5712 // Save a file with windows line endings. The file is written correctly.
5713 buffer2.update(cx, |buffer, cx| {
5714 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
5715 });
5716 project
5717 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
5718 .await
5719 .unwrap();
5720 assert_eq!(
5721 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
5722 "one\r\ntwo\r\nthree\r\nfour\r\n",
5723 );
5724}
5725
// Verifies how pushed LSP diagnostics that reference each other through
// `relatedInformation` are grouped: each hint joins its primary diagnostic's
// group, `diagnostics_in_range` reports every entry ordered by range, and
// `diagnostic_group` retrieves one group at a time.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Publish two logical groups of diagnostics:
    // - "error 1" (WARNING) with one related hint at the same range;
    // - "error 2" (ERROR) with two related hints at a different range.
    // The hint diagnostics carry `relatedInformation` pointing back at their
    // primary diagnostic's location ("original diagnostic").
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary diagnostic of the first group.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint belonging to the first group, pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary diagnostic of the second group, with two related hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint of the second group.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint of the second group.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the published diagnostics into the LSP store as pushed (not pulled)
    // diagnostics, then snapshot the buffer for the assertions below.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All five diagnostics come back ordered by range. "error 2" and its two
    // hints share group 0; "error 1" and its hint share group 1; only the
    // originally-published diagnostics (not the hints) are primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 contains "error 2" plus both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 contains "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5985
// Verifies that renaming a worktree entry drives the LSP file-operation
// protocol: `workspace/willRenameFiles` is sent first (and the workspace edit
// it returns is resolved), followed by the `workspace/didRenameFiles`
// notification, for a server that registered for those operations.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters the server registers for: individual `*.rs`
    // files, and any folder.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    // Fake server advertising both didRename and willRename support for the
    // filters above.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off renaming `one.rs` to `three.rs`; the returned task completes
    // only after the will-rename round-trip handled below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // An arbitrary workspace edit for the server to return from
    // willRenameFiles; we assert at the end that this exact edit was resolved.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    // The server receives willRenameFiles with the old and new URIs and
    // responds with the workspace edit above, recording that it did so.
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server is notified via didRenameFiles
    // with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6122
// Exercises the two-step LSP symbol-rename flow: `textDocument/prepareRename`
// to validate the symbol range at a position, then `textDocument/rename`,
// whose workspace edit is applied as a transaction across two buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Fake server advertising rename support, including prepareRename.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Ask to prepare a rename at offset 7 (inside `ONE`); the server responds
    // with the symbol's full range.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The LSP range maps back to buffer offsets 6..9, i.e. the `ONE` token.
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename to "THREE"; the server returns edits spanning both
    // `one.rs` (the definition) and `two.rs` (two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both buffers: the one we renamed in,
    // and the other file edited by the workspace edit.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6263
6264#[gpui::test]
6265async fn test_search(cx: &mut gpui::TestAppContext) {
6266 init_test(cx);
6267
6268 let fs = FakeFs::new(cx.executor());
6269 fs.insert_tree(
6270 path!("/dir"),
6271 json!({
6272 "one.rs": "const ONE: usize = 1;",
6273 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6274 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6275 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6276 }),
6277 )
6278 .await;
6279 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6280 assert_eq!(
6281 search(
6282 &project,
6283 SearchQuery::text(
6284 "TWO",
6285 false,
6286 true,
6287 false,
6288 Default::default(),
6289 Default::default(),
6290 false,
6291 None
6292 )
6293 .unwrap(),
6294 cx
6295 )
6296 .await
6297 .unwrap(),
6298 HashMap::from_iter([
6299 (path!("dir/two.rs").to_string(), vec![6..9]),
6300 (path!("dir/three.rs").to_string(), vec![37..40])
6301 ])
6302 );
6303
6304 let buffer_4 = project
6305 .update(cx, |project, cx| {
6306 project.open_local_buffer(path!("/dir/four.rs"), cx)
6307 })
6308 .await
6309 .unwrap();
6310 buffer_4.update(cx, |buffer, cx| {
6311 let text = "two::TWO";
6312 buffer.edit([(20..28, text), (31..43, text)], None, cx);
6313 });
6314
6315 assert_eq!(
6316 search(
6317 &project,
6318 SearchQuery::text(
6319 "TWO",
6320 false,
6321 true,
6322 false,
6323 Default::default(),
6324 Default::default(),
6325 false,
6326 None,
6327 )
6328 .unwrap(),
6329 cx
6330 )
6331 .await
6332 .unwrap(),
6333 HashMap::from_iter([
6334 (path!("dir/two.rs").to_string(), vec![6..9]),
6335 (path!("dir/three.rs").to_string(), vec![37..40]),
6336 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
6337 ])
6338 );
6339}
6340
6341#[gpui::test]
6342async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
6343 init_test(cx);
6344
6345 let search_query = "file";
6346
6347 let fs = FakeFs::new(cx.executor());
6348 fs.insert_tree(
6349 path!("/dir"),
6350 json!({
6351 "one.rs": r#"// Rust file one"#,
6352 "one.ts": r#"// TypeScript file one"#,
6353 "two.rs": r#"// Rust file two"#,
6354 "two.ts": r#"// TypeScript file two"#,
6355 }),
6356 )
6357 .await;
6358 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6359
6360 assert!(
6361 search(
6362 &project,
6363 SearchQuery::text(
6364 search_query,
6365 false,
6366 true,
6367 false,
6368 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6369 Default::default(),
6370 false,
6371 None
6372 )
6373 .unwrap(),
6374 cx
6375 )
6376 .await
6377 .unwrap()
6378 .is_empty(),
6379 "If no inclusions match, no files should be returned"
6380 );
6381
6382 assert_eq!(
6383 search(
6384 &project,
6385 SearchQuery::text(
6386 search_query,
6387 false,
6388 true,
6389 false,
6390 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6391 Default::default(),
6392 false,
6393 None
6394 )
6395 .unwrap(),
6396 cx
6397 )
6398 .await
6399 .unwrap(),
6400 HashMap::from_iter([
6401 (path!("dir/one.rs").to_string(), vec![8..12]),
6402 (path!("dir/two.rs").to_string(), vec![8..12]),
6403 ]),
6404 "Rust only search should give only Rust files"
6405 );
6406
6407 assert_eq!(
6408 search(
6409 &project,
6410 SearchQuery::text(
6411 search_query,
6412 false,
6413 true,
6414 false,
6415 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6416 .unwrap(),
6417 Default::default(),
6418 false,
6419 None,
6420 )
6421 .unwrap(),
6422 cx
6423 )
6424 .await
6425 .unwrap(),
6426 HashMap::from_iter([
6427 (path!("dir/one.ts").to_string(), vec![14..18]),
6428 (path!("dir/two.ts").to_string(), vec![14..18]),
6429 ]),
6430 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
6431 );
6432
6433 assert_eq!(
6434 search(
6435 &project,
6436 SearchQuery::text(
6437 search_query,
6438 false,
6439 true,
6440 false,
6441 PathMatcher::new(
6442 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6443 PathStyle::local()
6444 )
6445 .unwrap(),
6446 Default::default(),
6447 false,
6448 None,
6449 )
6450 .unwrap(),
6451 cx
6452 )
6453 .await
6454 .unwrap(),
6455 HashMap::from_iter([
6456 (path!("dir/two.ts").to_string(), vec![14..18]),
6457 (path!("dir/one.rs").to_string(), vec![8..12]),
6458 (path!("dir/one.ts").to_string(), vec![14..18]),
6459 (path!("dir/two.rs").to_string(), vec![8..12]),
6460 ]),
6461 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
6462 );
6463}
6464
6465#[gpui::test]
6466async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
6467 init_test(cx);
6468
6469 let search_query = "file";
6470
6471 let fs = FakeFs::new(cx.executor());
6472 fs.insert_tree(
6473 path!("/dir"),
6474 json!({
6475 "one.rs": r#"// Rust file one"#,
6476 "one.ts": r#"// TypeScript file one"#,
6477 "two.rs": r#"// Rust file two"#,
6478 "two.ts": r#"// TypeScript file two"#,
6479 }),
6480 )
6481 .await;
6482 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6483
6484 assert_eq!(
6485 search(
6486 &project,
6487 SearchQuery::text(
6488 search_query,
6489 false,
6490 true,
6491 false,
6492 Default::default(),
6493 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6494 false,
6495 None,
6496 )
6497 .unwrap(),
6498 cx
6499 )
6500 .await
6501 .unwrap(),
6502 HashMap::from_iter([
6503 (path!("dir/one.rs").to_string(), vec![8..12]),
6504 (path!("dir/one.ts").to_string(), vec![14..18]),
6505 (path!("dir/two.rs").to_string(), vec![8..12]),
6506 (path!("dir/two.ts").to_string(), vec![14..18]),
6507 ]),
6508 "If no exclusions match, all files should be returned"
6509 );
6510
6511 assert_eq!(
6512 search(
6513 &project,
6514 SearchQuery::text(
6515 search_query,
6516 false,
6517 true,
6518 false,
6519 Default::default(),
6520 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6521 false,
6522 None,
6523 )
6524 .unwrap(),
6525 cx
6526 )
6527 .await
6528 .unwrap(),
6529 HashMap::from_iter([
6530 (path!("dir/one.ts").to_string(), vec![14..18]),
6531 (path!("dir/two.ts").to_string(), vec![14..18]),
6532 ]),
6533 "Rust exclusion search should give only TypeScript files"
6534 );
6535
6536 assert_eq!(
6537 search(
6538 &project,
6539 SearchQuery::text(
6540 search_query,
6541 false,
6542 true,
6543 false,
6544 Default::default(),
6545 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6546 .unwrap(),
6547 false,
6548 None,
6549 )
6550 .unwrap(),
6551 cx
6552 )
6553 .await
6554 .unwrap(),
6555 HashMap::from_iter([
6556 (path!("dir/one.rs").to_string(), vec![8..12]),
6557 (path!("dir/two.rs").to_string(), vec![8..12]),
6558 ]),
6559 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6560 );
6561
6562 assert!(
6563 search(
6564 &project,
6565 SearchQuery::text(
6566 search_query,
6567 false,
6568 true,
6569 false,
6570 Default::default(),
6571 PathMatcher::new(
6572 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6573 PathStyle::local(),
6574 )
6575 .unwrap(),
6576 false,
6577 None,
6578 )
6579 .unwrap(),
6580 cx
6581 )
6582 .await
6583 .unwrap()
6584 .is_empty(),
6585 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6586 );
6587}
6588
6589#[gpui::test]
6590async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
6591 init_test(cx);
6592
6593 let search_query = "file";
6594
6595 let fs = FakeFs::new(cx.executor());
6596 fs.insert_tree(
6597 path!("/dir"),
6598 json!({
6599 "one.rs": r#"// Rust file one"#,
6600 "one.ts": r#"// TypeScript file one"#,
6601 "two.rs": r#"// Rust file two"#,
6602 "two.ts": r#"// TypeScript file two"#,
6603 }),
6604 )
6605 .await;
6606
6607 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6608 let path_style = PathStyle::local();
6609 let _buffer = project.update(cx, |project, cx| {
6610 project.create_local_buffer("file", None, false, cx)
6611 });
6612
6613 assert_eq!(
6614 search(
6615 &project,
6616 SearchQuery::text(
6617 search_query,
6618 false,
6619 true,
6620 false,
6621 Default::default(),
6622 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
6623 false,
6624 None,
6625 )
6626 .unwrap(),
6627 cx
6628 )
6629 .await
6630 .unwrap(),
6631 HashMap::from_iter([
6632 (path!("dir/one.rs").to_string(), vec![8..12]),
6633 (path!("dir/one.ts").to_string(), vec![14..18]),
6634 (path!("dir/two.rs").to_string(), vec![8..12]),
6635 (path!("dir/two.ts").to_string(), vec![14..18]),
6636 ]),
6637 "If no exclusions match, all files should be returned"
6638 );
6639
6640 assert_eq!(
6641 search(
6642 &project,
6643 SearchQuery::text(
6644 search_query,
6645 false,
6646 true,
6647 false,
6648 Default::default(),
6649 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
6650 false,
6651 None,
6652 )
6653 .unwrap(),
6654 cx
6655 )
6656 .await
6657 .unwrap(),
6658 HashMap::from_iter([
6659 (path!("dir/one.ts").to_string(), vec![14..18]),
6660 (path!("dir/two.ts").to_string(), vec![14..18]),
6661 ]),
6662 "Rust exclusion search should give only TypeScript files"
6663 );
6664
6665 assert_eq!(
6666 search(
6667 &project,
6668 SearchQuery::text(
6669 search_query,
6670 false,
6671 true,
6672 false,
6673 Default::default(),
6674 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6675 false,
6676 None,
6677 )
6678 .unwrap(),
6679 cx
6680 )
6681 .await
6682 .unwrap(),
6683 HashMap::from_iter([
6684 (path!("dir/one.rs").to_string(), vec![8..12]),
6685 (path!("dir/two.rs").to_string(), vec![8..12]),
6686 ]),
6687 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6688 );
6689
6690 assert!(
6691 search(
6692 &project,
6693 SearchQuery::text(
6694 search_query,
6695 false,
6696 true,
6697 false,
6698 Default::default(),
6699 PathMatcher::new(
6700 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6701 PathStyle::local(),
6702 )
6703 .unwrap(),
6704 false,
6705 None,
6706 )
6707 .unwrap(),
6708 cx
6709 )
6710 .await
6711 .unwrap()
6712 .is_empty(),
6713 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6714 );
6715}
6716
6717#[gpui::test]
6718async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6719 init_test(cx);
6720
6721 let search_query = "file";
6722
6723 let fs = FakeFs::new(cx.executor());
6724 fs.insert_tree(
6725 path!("/dir"),
6726 json!({
6727 "one.rs": r#"// Rust file one"#,
6728 "one.ts": r#"// TypeScript file one"#,
6729 "two.rs": r#"// Rust file two"#,
6730 "two.ts": r#"// TypeScript file two"#,
6731 }),
6732 )
6733 .await;
6734 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6735 assert!(
6736 search(
6737 &project,
6738 SearchQuery::text(
6739 search_query,
6740 false,
6741 true,
6742 false,
6743 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6744 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6745 false,
6746 None,
6747 )
6748 .unwrap(),
6749 cx
6750 )
6751 .await
6752 .unwrap()
6753 .is_empty(),
6754 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6755 );
6756
6757 assert!(
6758 search(
6759 &project,
6760 SearchQuery::text(
6761 search_query,
6762 false,
6763 true,
6764 false,
6765 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6766 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6767 false,
6768 None,
6769 )
6770 .unwrap(),
6771 cx
6772 )
6773 .await
6774 .unwrap()
6775 .is_empty(),
6776 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6777 );
6778
6779 assert!(
6780 search(
6781 &project,
6782 SearchQuery::text(
6783 search_query,
6784 false,
6785 true,
6786 false,
6787 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6788 .unwrap(),
6789 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6790 .unwrap(),
6791 false,
6792 None,
6793 )
6794 .unwrap(),
6795 cx
6796 )
6797 .await
6798 .unwrap()
6799 .is_empty(),
6800 "Non-matching inclusions and exclusions should not change that."
6801 );
6802
6803 assert_eq!(
6804 search(
6805 &project,
6806 SearchQuery::text(
6807 search_query,
6808 false,
6809 true,
6810 false,
6811 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6812 .unwrap(),
6813 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6814 .unwrap(),
6815 false,
6816 None,
6817 )
6818 .unwrap(),
6819 cx
6820 )
6821 .await
6822 .unwrap(),
6823 HashMap::from_iter([
6824 (path!("dir/one.ts").to_string(), vec![14..18]),
6825 (path!("dir/two.ts").to_string(), vec![14..18]),
6826 ]),
6827 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6828 );
6829}
6830
// Verifies that inclusion filters can scope a search to a single worktree by
// prefixing the glob with the worktree name, and that worktree-relative globs
// still match across all worktrees.
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two worktrees containing identically named files, each with the needle.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // Worktree-name-prefixed glob; the `true` flag after the matchers
    // presumably switches matching to full (worktree-prefixed) paths —
    // TODO confirm against SearchQuery::text's parameter list.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        // 3..9 is the byte range of "NEEDLE" inside "// NEEDLE".
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // A worktree-relative glob (no worktree prefix, flag false) matches the
    // same-named file in every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
6929
// Verifies that search skips gitignored directories by default, traverses
// them when the include-ignored flag is set, and still honors
// inclusion/exclusion filters inside ignored directories.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // `target` and `node_modules` are gitignored; only the root
    // `package.json` is tracked content containing the query.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search (4th flag false): ignored directories are not scanned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project per query — presumably to avoid any worktree/search
    // state carried over from the previous scan; verify before simplifying.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // Include-ignored search (4th flag true) finds matches everywhere.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusions/exclusions still apply within the ignored tree: only the
    // non-TS file under node_modules/prettier survives both filters.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
7054
// Verifies searching for multi-byte (Cyrillic) text: result ranges are byte
// offsets (each Cyrillic char is 2 bytes in UTF-8, so "привет" spans 12
// bytes), and case-insensitive queries are internally lowered to regex.
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive plain-text search stays a Text query.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        // Only lowercase occurrences match; ranges are byte offsets.
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive text search is converted to a Regex query (asserted
    // below), which handles Unicode case folding.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        // Now both ПРИВЕТ and привет occurrences match in every file.
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing '.' is matched literally (13-byte range), so only the
    // file ending in "ПРИВЕТ." matches case-insensitively.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7137
7138#[gpui::test]
7139async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7140 init_test(cx);
7141
7142 let fs = FakeFs::new(cx.executor());
7143 fs.insert_tree(
7144 "/one/two",
7145 json!({
7146 "three": {
7147 "a.txt": "",
7148 "four": {}
7149 },
7150 "c.rs": ""
7151 }),
7152 )
7153 .await;
7154
7155 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7156 project
7157 .update(cx, |project, cx| {
7158 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7159 project.create_entry((id, rel_path("b..")), true, cx)
7160 })
7161 .await
7162 .unwrap()
7163 .into_included()
7164 .unwrap();
7165
7166 assert_eq!(
7167 fs.paths(true),
7168 vec![
7169 PathBuf::from(path!("/")),
7170 PathBuf::from(path!("/one")),
7171 PathBuf::from(path!("/one/two")),
7172 PathBuf::from(path!("/one/two/c.rs")),
7173 PathBuf::from(path!("/one/two/three")),
7174 PathBuf::from(path!("/one/two/three/a.txt")),
7175 PathBuf::from(path!("/one/two/three/b..")),
7176 PathBuf::from(path!("/one/two/three/four")),
7177 ]
7178 );
7179}
7180
// Verifies hover aggregation across multiple language servers for one buffer:
// servers without hover capability are never queried, servers returning None
// are dropped, and the remaining responses are merged.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // The first three servers advertise hover capability; the fourth does
    // not, so the project must never send it a hover request.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers for the tsx language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install hover handlers per server; handler streams must be set up
    // BEFORE the hover request is issued below.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two answer with actual hover content.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // ESLint is queried but returns no hover; it must be filtered out.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // No hover capability — any hover request reaching it is a bug.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Kick off the hover, then drain each handler stream to prove every
    // capable server actually received a request.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned content contribute results.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
7335
// Verifies that hover responses consisting only of empty/whitespace-only
// parts are dropped entirely rather than surfaced as blank hovers.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server responds with three hover parts that are all effectively
    // empty: "", whitespace, and bare newlines.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    // Issue the hover, then confirm the request actually reached the server
    // before asserting on the (empty) result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
7409
// Verifies that requesting code actions with an explicit `kinds` filter only
// returns actions of those kinds, even when the server offers more.
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds; only one kind is
    // requested below.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only SOURCE_ORGANIZE_IMPORTS actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Confirm the request reached the server before asserting on results.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the organize-imports action should survive the kind filter.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
7488
7489#[gpui::test]
7490async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
7491 init_test(cx);
7492
7493 let fs = FakeFs::new(cx.executor());
7494 fs.insert_tree(
7495 path!("/dir"),
7496 json!({
7497 "a.tsx": "a",
7498 }),
7499 )
7500 .await;
7501
7502 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7503
7504 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7505 language_registry.add(tsx_lang());
7506 let language_server_names = [
7507 "TypeScriptServer",
7508 "TailwindServer",
7509 "ESLintServer",
7510 "NoActionsCapabilitiesServer",
7511 ];
7512
7513 let mut language_server_rxs = [
7514 language_registry.register_fake_lsp(
7515 "tsx",
7516 FakeLspAdapter {
7517 name: language_server_names[0],
7518 capabilities: lsp::ServerCapabilities {
7519 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7520 ..lsp::ServerCapabilities::default()
7521 },
7522 ..FakeLspAdapter::default()
7523 },
7524 ),
7525 language_registry.register_fake_lsp(
7526 "tsx",
7527 FakeLspAdapter {
7528 name: language_server_names[1],
7529 capabilities: lsp::ServerCapabilities {
7530 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7531 ..lsp::ServerCapabilities::default()
7532 },
7533 ..FakeLspAdapter::default()
7534 },
7535 ),
7536 language_registry.register_fake_lsp(
7537 "tsx",
7538 FakeLspAdapter {
7539 name: language_server_names[2],
7540 capabilities: lsp::ServerCapabilities {
7541 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7542 ..lsp::ServerCapabilities::default()
7543 },
7544 ..FakeLspAdapter::default()
7545 },
7546 ),
7547 language_registry.register_fake_lsp(
7548 "tsx",
7549 FakeLspAdapter {
7550 name: language_server_names[3],
7551 capabilities: lsp::ServerCapabilities {
7552 code_action_provider: None,
7553 ..lsp::ServerCapabilities::default()
7554 },
7555 ..FakeLspAdapter::default()
7556 },
7557 ),
7558 ];
7559
7560 let (buffer, _handle) = project
7561 .update(cx, |p, cx| {
7562 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7563 })
7564 .await
7565 .unwrap();
7566 cx.executor().run_until_parked();
7567
7568 let mut servers_with_actions_requests = HashMap::default();
7569 for i in 0..language_server_names.len() {
7570 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
7571 panic!(
7572 "Failed to get language server #{i} with name {}",
7573 &language_server_names[i]
7574 )
7575 });
7576 let new_server_name = new_server.server.name();
7577
7578 assert!(
7579 !servers_with_actions_requests.contains_key(&new_server_name),
7580 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7581 );
7582 match new_server_name.0.as_ref() {
7583 "TailwindServer" | "TypeScriptServer" => {
7584 servers_with_actions_requests.insert(
7585 new_server_name.clone(),
7586 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7587 move |_, _| {
7588 let name = new_server_name.clone();
7589 async move {
7590 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
7591 lsp::CodeAction {
7592 title: format!("{name} code action"),
7593 ..lsp::CodeAction::default()
7594 },
7595 )]))
7596 }
7597 },
7598 ),
7599 );
7600 }
7601 "ESLintServer" => {
7602 servers_with_actions_requests.insert(
7603 new_server_name,
7604 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7605 |_, _| async move { Ok(None) },
7606 ),
7607 );
7608 }
7609 "NoActionsCapabilitiesServer" => {
7610 let _never_handled = new_server
7611 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7612 panic!(
7613 "Should not call for code actions server with no corresponding capabilities"
7614 )
7615 });
7616 }
7617 unexpected => panic!("Unexpected server name: {unexpected}"),
7618 }
7619 }
7620
7621 let code_actions_task = project.update(cx, |project, cx| {
7622 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
7623 });
7624
7625 // cx.run_until_parked();
7626 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
7627 |mut code_actions_request| async move {
7628 code_actions_request
7629 .next()
7630 .await
7631 .expect("All code actions requests should have been triggered")
7632 },
7633 ))
7634 .await;
7635 assert_eq!(
7636 vec!["TailwindServer code action", "TypeScriptServer code action"],
7637 code_actions_task
7638 .await
7639 .unwrap()
7640 .unwrap()
7641 .into_iter()
7642 .map(|code_action| code_action.lsp_action.title().to_owned())
7643 .sorted()
7644 .collect::<Vec<_>>(),
7645 "Should receive code actions responses from all related servers with hover capabilities"
7646 );
7647}
7648
7649#[gpui::test]
7650async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
7651 init_test(cx);
7652
7653 let fs = FakeFs::new(cx.executor());
7654 fs.insert_tree(
7655 "/dir",
7656 json!({
7657 "a.rs": "let a = 1;",
7658 "b.rs": "let b = 2;",
7659 "c.rs": "let c = 2;",
7660 }),
7661 )
7662 .await;
7663
7664 let project = Project::test(
7665 fs,
7666 [
7667 "/dir/a.rs".as_ref(),
7668 "/dir/b.rs".as_ref(),
7669 "/dir/c.rs".as_ref(),
7670 ],
7671 cx,
7672 )
7673 .await;
7674
7675 // check the initial state and get the worktrees
7676 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7677 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7678 assert_eq!(worktrees.len(), 3);
7679
7680 let worktree_a = worktrees[0].read(cx);
7681 let worktree_b = worktrees[1].read(cx);
7682 let worktree_c = worktrees[2].read(cx);
7683
7684 // check they start in the right order
7685 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7686 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7687 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7688
7689 (
7690 worktrees[0].clone(),
7691 worktrees[1].clone(),
7692 worktrees[2].clone(),
7693 )
7694 });
7695
7696 // move first worktree to after the second
7697 // [a, b, c] -> [b, a, c]
7698 project
7699 .update(cx, |project, cx| {
7700 let first = worktree_a.read(cx);
7701 let second = worktree_b.read(cx);
7702 project.move_worktree(first.id(), second.id(), cx)
7703 })
7704 .expect("moving first after second");
7705
7706 // check the state after moving
7707 project.update(cx, |project, cx| {
7708 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7709 assert_eq!(worktrees.len(), 3);
7710
7711 let first = worktrees[0].read(cx);
7712 let second = worktrees[1].read(cx);
7713 let third = worktrees[2].read(cx);
7714
7715 // check they are now in the right order
7716 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7717 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7718 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7719 });
7720
7721 // move the second worktree to before the first
7722 // [b, a, c] -> [a, b, c]
7723 project
7724 .update(cx, |project, cx| {
7725 let second = worktree_a.read(cx);
7726 let first = worktree_b.read(cx);
7727 project.move_worktree(first.id(), second.id(), cx)
7728 })
7729 .expect("moving second before first");
7730
7731 // check the state after moving
7732 project.update(cx, |project, cx| {
7733 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7734 assert_eq!(worktrees.len(), 3);
7735
7736 let first = worktrees[0].read(cx);
7737 let second = worktrees[1].read(cx);
7738 let third = worktrees[2].read(cx);
7739
7740 // check they are now in the right order
7741 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7742 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7743 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7744 });
7745
7746 // move the second worktree to after the third
7747 // [a, b, c] -> [a, c, b]
7748 project
7749 .update(cx, |project, cx| {
7750 let second = worktree_b.read(cx);
7751 let third = worktree_c.read(cx);
7752 project.move_worktree(second.id(), third.id(), cx)
7753 })
7754 .expect("moving second after third");
7755
7756 // check the state after moving
7757 project.update(cx, |project, cx| {
7758 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7759 assert_eq!(worktrees.len(), 3);
7760
7761 let first = worktrees[0].read(cx);
7762 let second = worktrees[1].read(cx);
7763 let third = worktrees[2].read(cx);
7764
7765 // check they are now in the right order
7766 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7767 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7768 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7769 });
7770
7771 // move the third worktree to before the second
7772 // [a, c, b] -> [a, b, c]
7773 project
7774 .update(cx, |project, cx| {
7775 let third = worktree_c.read(cx);
7776 let second = worktree_b.read(cx);
7777 project.move_worktree(third.id(), second.id(), cx)
7778 })
7779 .expect("moving third before second");
7780
7781 // check the state after moving
7782 project.update(cx, |project, cx| {
7783 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7784 assert_eq!(worktrees.len(), 3);
7785
7786 let first = worktrees[0].read(cx);
7787 let second = worktrees[1].read(cx);
7788 let third = worktrees[2].read(cx);
7789
7790 // check they are now in the right order
7791 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7792 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7793 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7794 });
7795
7796 // move the first worktree to after the third
7797 // [a, b, c] -> [b, c, a]
7798 project
7799 .update(cx, |project, cx| {
7800 let first = worktree_a.read(cx);
7801 let third = worktree_c.read(cx);
7802 project.move_worktree(first.id(), third.id(), cx)
7803 })
7804 .expect("moving first after third");
7805
7806 // check the state after moving
7807 project.update(cx, |project, cx| {
7808 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7809 assert_eq!(worktrees.len(), 3);
7810
7811 let first = worktrees[0].read(cx);
7812 let second = worktrees[1].read(cx);
7813 let third = worktrees[2].read(cx);
7814
7815 // check they are now in the right order
7816 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7817 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7818 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7819 });
7820
7821 // move the third worktree to before the first
7822 // [b, c, a] -> [a, b, c]
7823 project
7824 .update(cx, |project, cx| {
7825 let third = worktree_a.read(cx);
7826 let first = worktree_b.read(cx);
7827 project.move_worktree(third.id(), first.id(), cx)
7828 })
7829 .expect("moving third before first");
7830
7831 // check the state after moving
7832 project.update(cx, |project, cx| {
7833 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7834 assert_eq!(worktrees.len(), 3);
7835
7836 let first = worktrees[0].read(cx);
7837 let second = worktrees[1].read(cx);
7838 let third = worktrees[2].read(cx);
7839
7840 // check they are now in the right order
7841 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7842 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7843 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7844 });
7845}
7846
// Verifies that a buffer's unstaged diff (working copy vs. git index) is
// computed on open and recomputed when the index content changes.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index version: plain hello-world.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working copy: adds a comment line and changes the println.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the diff recalculation settle, then expect two hunks: the added
    // comment line (row 0) and the modified println (row 2).
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Update the index so it now contains the comment but no println body.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    // After the index change, only the println line remains unstaged.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff
                .snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
7940
// Verifies the uncommitted diff (working copy vs. HEAD) for buffers, including:
// the secondary (staged/unstaged) status of hunks, reaction to HEAD moving,
// and the status of a file that exists in HEAD but was deleted on disk.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the file: HEAD, index, and working copy all differ.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // "deletion.rs" exists in HEAD and the index but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should be syntax-highlighted with the buffer's language.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        // The comment line is unstaged (HasSecondaryHunk); the println! change
        // is already in the index, so it has no secondary hunk.
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // Deletion is unstaged: the file is still present in the index.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file (remove it from the index entirely).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk no longer has a secondary (unstaged) counterpart.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8124
// Exercises staging individual hunks through the uncommitted diff:
// optimistic (pending) state transitions, the events emitted around each
// stage operation, rollback when the index write fails, and batching of
// multiple staging operations. The assertions are order-sensitive: each
// `diff_events.next().await` must line up with exactly one operation above it.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: deletes "zero", uppercases "two" and "four".
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so every hunk begins unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The staged hunk is pending (SecondaryHunkRemovalPending) until the
        // index write completes; the others are untouched.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // Optimistically pending, exactly as in the successful case above.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8474
// Regression test for staging hunks while filesystem events are delayed:
// new staging operations issued before earlier index-write FS events are
// delivered must not clobber pending state. Events are paused and flushed
// manually; the specific seeds reproduce the interleavings of interest.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: deletes "zero", uppercases "two" and "four".
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Only the first hunk becomes pending.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // The first hunk must still be pending: its FS event hasn't arrived.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8668
// Randomized test: repeatedly stage/unstage random hunks with random delays,
// mirroring the expected secondary status in a local `hunks` vec, then check
// that once everything settles the diff's hunk statuses match the mirror.
// `OPERATIONS` env var overrides the number of operations (default 20).
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every 5th line is modified in the working copy, producing
    // 6 modification hunks against HEAD.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk's staged-ness and record the expected pending state.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Randomly interleave with background work to shake out races.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, pending states resolve to their final values.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8788
// Verifies that uncommitted diffs work when the project's worktree is a
// single file (opened directly) rather than the repository's root directory.
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index are identical, so the only hunk is unstaged.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs", committed_contents.clone())],
    );

    // Open the file itself as the worktree root — not its repository directory.
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &uncommitted_diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                DiffHunkStatus {
                    kind: DiffHunkStatusKind::Modified,
                    secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
                },
            )],
        );
    });
}
8862
// TODO: Should we test this on Windows also?
// Regression test (real FS + real git): staging a hunk of an executable file
// must not rewrite the index entry's mode from 100755 to 100644. Verified by
// shelling out to `git diff --staged` and `git ls-files -s`.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Real FS + subprocesses require parking the executor.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    // Commit the file as executable (mode 755), then modify its contents.
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk, which rewrites the file's index entry.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
8947
// Verifies mapping of project paths to (repository, repo-relative path) with
// nested repositories: a file in a nested dependency repo resolves to the
// inner repo, a file outside any repo resolves to None, and removing a .git
// directory invalidates previously-resolved paths.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // Layout: /root has no repo; /root/dir1 is a repo containing a nested
    // repo at /root/dir1/deps/dep1.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project path, expected (repo work dir, repo-relative path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer repo's .git dir should make its files unresolvable.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
9037
// Verifies the special-casing of a git repository at the user's home
// directory: when a project is opened for a subfolder of home, the home repo
// is ignored; when home itself is opened as the worktree, the repo is used.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Case 1: open only ~/project — the repo at ~ should NOT be picked up.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Case 2: open ~ itself — now the home repo applies to files beneath it.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
9095
/// Exercises git status tracking against a real on-disk repository:
/// modified, untracked, deleted, and unchanged files on startup, then status
/// refreshes after worktree edits, commits, and working-copy deletions.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-FS test: allow background threads to block on disk I/O.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        // c.txt is committed and unchanged, so it has no status entry.
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modifying a previously-unchanged file should surface a new status entry.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modified files and drop d.txt from the index.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked (a.txt) and one untracked (b.txt) file.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
9225
/// Verifies status postprocessing: a nested git repository inside the work
/// directory is excluded from the outer repository's statuses, and a file
/// deleted in the index but present on disk gets a combined "deleted in
/// index, added in worktree" status.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-FS test: allow background threads to block on disk I/O.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer repository (not the nested one in `sub`).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
9289
9290#[track_caller]
9291/// We merge lhs into rhs.
9292fn merge_pending_ops_snapshots(
9293 source: Vec<pending_op::PendingOps>,
9294 mut target: Vec<pending_op::PendingOps>,
9295) -> Vec<pending_op::PendingOps> {
9296 for s_ops in source {
9297 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9298 if ops.repo_path == s_ops.repo_path {
9299 Some(idx)
9300 } else {
9301 None
9302 }
9303 }) {
9304 let t_ops = &mut target[idx];
9305 for s_op in s_ops.ops {
9306 if let Some(op_idx) = t_ops
9307 .ops
9308 .iter()
9309 .zip(0..)
9310 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9311 {
9312 let t_op = &mut t_ops.ops[op_idx];
9313 match (s_op.job_status, t_op.job_status) {
9314 (pending_op::JobStatus::Running, _) => {}
9315 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9316 (s_st, t_st) if s_st == t_st => {}
9317 _ => unreachable!(),
9318 }
9319 } else {
9320 t_ops.ops.push(s_op);
9321 }
9322 }
9323 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9324 } else {
9325 target.push(s_ops);
9326 }
9327 }
9328 target
9329}
9330
/// Verifies that repeatedly staging/unstaging a single entry produces pending
/// ops that transition from `Running` (while the task is in flight) to
/// `Finished`, and that the full op history — one op per toggle, with
/// monotonically increasing ids — is observable via `PendingOpsChanged`
/// events.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store, merged
    // into a single history so intermediate states aren't lost across events.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Each stage/unstage toggle below is expected to allocate the next op id.
    let mut id = 1u16;

    // Drives a stage or unstage of `path`, asserting the op is observable as
    // `Running` while in flight and `Finished` once the task completes.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            // The op must already be visible (as Running) before awaiting.
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        // After the task resolves, the same op should be marked Finished.
        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Toggle staging five times; ids 1..=5 should be assigned in order.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The accumulated event history records all five toggles as Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // Net effect of the final stage: added in index, unmodified in worktree.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9491
/// Verifies that when two stage operations for the same entry overlap, the
/// first in-flight op is superseded (recorded as `Skipped`) and only the
/// second completes as `Finished`.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store, merged
    // into a single history so intermediate states aren't lost across events.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Kick off a stage of a.txt and leave it running in the background.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Immediately stage the same entry again; this second op should win.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was superseded (Skipped); op 2 carried the stage to completion.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // Net effect: the entry ends up staged (added in index).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
9597
/// Verifies pending-op bookkeeping for bulk operations: `stage_all` creates
/// ops only for entries that aren't already staged, and `unstage_all` creates
/// one op per staged entry.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store, merged
    // into a single history so intermediate states aren't lost across events.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt individually, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: op 1 from the explicit stage (stage_all added no op, since it
    // was already staged), op 2 from unstage_all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: op 1 from stage_all, op 2 from unstage_all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both entries are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
9726
9727#[gpui::test]
9728async fn test_repository_subfolder_git_status(
9729 executor: gpui::BackgroundExecutor,
9730 cx: &mut gpui::TestAppContext,
9731) {
9732 init_test(cx);
9733
9734 let fs = FakeFs::new(executor);
9735 fs.insert_tree(
9736 path!("/root"),
9737 json!({
9738 "my-repo": {
9739 ".git": {},
9740 "a.txt": "a",
9741 "sub-folder-1": {
9742 "sub-folder-2": {
9743 "c.txt": "cc",
9744 "d": {
9745 "e.txt": "eee"
9746 }
9747 },
9748 }
9749 },
9750 }),
9751 )
9752 .await;
9753
9754 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
9755 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
9756
9757 fs.set_status_for_repo(
9758 path!("/root/my-repo/.git").as_ref(),
9759 &[(E_TXT, FileStatus::Untracked)],
9760 );
9761
9762 let project = Project::test(
9763 fs.clone(),
9764 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
9765 cx,
9766 )
9767 .await;
9768
9769 project
9770 .update(cx, |project, cx| project.git_scans_complete(cx))
9771 .await;
9772 cx.run_until_parked();
9773
9774 let repository = project.read_with(cx, |project, cx| {
9775 project.repositories(cx).values().next().unwrap().clone()
9776 });
9777
9778 // Ensure that the git status is loaded correctly
9779 repository.read_with(cx, |repository, _cx| {
9780 assert_eq!(
9781 repository.work_directory_abs_path,
9782 Path::new(path!("/root/my-repo")).into()
9783 );
9784
9785 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
9786 assert_eq!(
9787 repository
9788 .status_for_path(&repo_path(E_TXT))
9789 .unwrap()
9790 .status,
9791 FileStatus::Untracked
9792 );
9793 });
9794
9795 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
9796 project
9797 .update(cx, |project, cx| project.git_scans_complete(cx))
9798 .await;
9799 cx.run_until_parked();
9800
9801 repository.read_with(cx, |repository, _cx| {
9802 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
9803 assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
9804 });
9805}
9806
9807// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
/// Verifies that a conflicted cherry-pick surfaces the conflicted path in
/// `merge_conflicts`, and that resolving it (commit + removing
/// CHERRY_PICK_HEAD) clears the conflict set.
/// Currently compiled out via `#[cfg(any())]` because it is flaky (see the
/// TODO above).
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-FS test: allow background threads to block on disk I/O.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create divergent edits to a.txt on two branches, then cherry-pick one
    // onto the other to force a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The conflicted path should now be reported by the repository.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once resolved, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
9889
9890#[gpui::test]
9891async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
9892 init_test(cx);
9893 let fs = FakeFs::new(cx.background_executor.clone());
9894 fs.insert_tree(
9895 path!("/root"),
9896 json!({
9897 ".git": {},
9898 ".gitignore": "*.txt\n",
9899 "a.xml": "<a></a>",
9900 "b.txt": "Some text"
9901 }),
9902 )
9903 .await;
9904
9905 fs.set_head_and_index_for_repo(
9906 path!("/root/.git").as_ref(),
9907 &[
9908 (".gitignore", "*.txt\n".into()),
9909 ("a.xml", "<a></a>".into()),
9910 ],
9911 );
9912
9913 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9914
9915 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9916 tree.flush_fs_events(cx).await;
9917 project
9918 .update(cx, |project, cx| project.git_scans_complete(cx))
9919 .await;
9920 cx.executor().run_until_parked();
9921
9922 let repository = project.read_with(cx, |project, cx| {
9923 project.repositories(cx).values().next().unwrap().clone()
9924 });
9925
9926 // One file is unmodified, the other is ignored.
9927 cx.read(|cx| {
9928 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
9929 assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
9930 });
9931
9932 // Change the gitignore, and stage the newly non-ignored file.
9933 fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
9934 .await
9935 .unwrap();
9936 fs.set_index_for_repo(
9937 Path::new(path!("/root/.git")),
9938 &[
9939 (".gitignore", "*.txt\n".into()),
9940 ("a.xml", "<a></a>".into()),
9941 ("b.txt", "Some text".into()),
9942 ],
9943 );
9944
9945 cx.executor().run_until_parked();
9946 cx.read(|cx| {
9947 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
9948 assert_entry_git_state(
9949 tree.read(cx),
9950 repository.read(cx),
9951 "b.txt",
9952 Some(StatusCode::Added),
9953 false,
9954 );
9955 });
9956}
9957
// NOTE:
// This test always fails on Windows because, unlike on Unix, Windows does not
// allow renaming a directory that some program currently has open.
// This is a limitation of Windows itself.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// Verifies that renaming a repository's work directory on disk updates the
/// repository's `work_directory_abs_path` while preserving the cached file
/// statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-FS test: allow background threads to block on disk I/O.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit `a`, then modify it; `b` stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Baseline state before the rename.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename, and statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10039
// NOTE: This test always fails on Windows because, unlike on Unix, Windows
// does not allow renaming a directory that some program currently has open.
// This is a limitation of Windows itself.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// Broad integration test of git file-status tracking against a real on-disk
/// repository: startup statuses, worktree modifications, commits, reset,
/// index removal, stash, ignored files, and renames of directories containing
/// untracked files.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-FS test: allow background threads to block on disk I/O.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so they start untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // a.txt and b.txt are now committed and clean.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the gitignore to also cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create an untracked file inside a nested, newly-created directory.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the top-level directory; the untracked status should follow the
    // file to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10264
// Verifies that filesystem churn inside a git-ignored directory (here `target/`,
// emulating a cargo/flycheck run) produces no spurious repository status
// updates, while entry add/remove events are still delivered for directories
// directly inside an already-loaded ignored directory.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem (RealFs + TempTree), so parking is allowed.
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record every repository update and worktree entry change so the
    // assertions below can compare exact event sequences.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                // The sentinel entry is test-harness plumbing, not a real file.
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Loading an ignored file forces its ancestor directories (`target`,
    // `target/debug`) into the worktree snapshot.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![
            RepositoryEvent::StatusesChanged,
            RepositoryEvent::MergeHeadsChanged,
        ],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Emulate a build tool: create `target/debug/deps`, write a temp file into
    // it, then delete the whole directory — all within the ignored subtree.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The snapshot should look exactly as before the churn.
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
10426
10427// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
10428// to different timings/ordering of events.
10429#[ignore]
10430#[gpui::test]
10431async fn test_odd_events_for_ignored_dirs(
10432 executor: BackgroundExecutor,
10433 cx: &mut gpui::TestAppContext,
10434) {
10435 init_test(cx);
10436 let fs = FakeFs::new(executor);
10437 fs.insert_tree(
10438 path!("/root"),
10439 json!({
10440 ".git": {},
10441 ".gitignore": "**/target/",
10442 "src": {
10443 "main.rs": "fn main() {}",
10444 },
10445 "target": {
10446 "debug": {
10447 "foo.txt": "foo",
10448 "deps": {}
10449 }
10450 }
10451 }),
10452 )
10453 .await;
10454 fs.set_head_and_index_for_repo(
10455 path!("/root/.git").as_ref(),
10456 &[
10457 (".gitignore", "**/target/".into()),
10458 ("src/main.rs", "fn main() {}".into()),
10459 ],
10460 );
10461
10462 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10463 let repository_updates = Arc::new(Mutex::new(Vec::new()));
10464 let project_events = Arc::new(Mutex::new(Vec::new()));
10465 project.update(cx, |project, cx| {
10466 let repository_updates = repository_updates.clone();
10467 cx.subscribe(project.git_store(), move |_, _, e, _| {
10468 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
10469 repository_updates.lock().push(e.clone());
10470 }
10471 })
10472 .detach();
10473 let project_events = project_events.clone();
10474 cx.subscribe_self(move |_, e, _| {
10475 if let Event::WorktreeUpdatedEntries(_, updates) = e {
10476 project_events.lock().extend(
10477 updates
10478 .iter()
10479 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
10480 .filter(|(path, _)| path != "fs-event-sentinel"),
10481 );
10482 }
10483 })
10484 .detach();
10485 });
10486
10487 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10488 tree.update(cx, |tree, cx| {
10489 tree.load_file(rel_path("target/debug/foo.txt"), cx)
10490 })
10491 .await
10492 .unwrap();
10493 tree.flush_fs_events(cx).await;
10494 project
10495 .update(cx, |project, cx| project.git_scans_complete(cx))
10496 .await;
10497 cx.run_until_parked();
10498 tree.update(cx, |tree, _| {
10499 assert_eq!(
10500 tree.entries(true, 0)
10501 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10502 .collect::<Vec<_>>(),
10503 vec![
10504 (rel_path(""), false),
10505 (rel_path(".gitignore"), false),
10506 (rel_path("src"), false),
10507 (rel_path("src/main.rs"), false),
10508 (rel_path("target"), true),
10509 (rel_path("target/debug"), true),
10510 (rel_path("target/debug/deps"), true),
10511 (rel_path("target/debug/foo.txt"), true),
10512 ]
10513 );
10514 });
10515
10516 assert_eq!(
10517 repository_updates.lock().drain(..).collect::<Vec<_>>(),
10518 vec![
10519 RepositoryEvent::MergeHeadsChanged,
10520 RepositoryEvent::BranchChanged,
10521 RepositoryEvent::StatusesChanged,
10522 RepositoryEvent::StatusesChanged,
10523 ],
10524 "Initial worktree scan should produce a repo update event"
10525 );
10526 assert_eq!(
10527 project_events.lock().drain(..).collect::<Vec<_>>(),
10528 vec![
10529 ("target".to_string(), PathChange::Loaded),
10530 ("target/debug".to_string(), PathChange::Loaded),
10531 ("target/debug/deps".to_string(), PathChange::Loaded),
10532 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
10533 ],
10534 "All non-ignored entries and all opened firs should be getting a project event",
10535 );
10536
10537 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
10538 // This may happen multiple times during a single flycheck, but once is enough for testing.
10539 fs.emit_fs_event("/root/target/debug/deps", None);
10540 tree.flush_fs_events(cx).await;
10541 project
10542 .update(cx, |project, cx| project.git_scans_complete(cx))
10543 .await;
10544 cx.executor().run_until_parked();
10545
10546 assert_eq!(
10547 repository_updates
10548 .lock()
10549 .iter()
10550 .cloned()
10551 .collect::<Vec<_>>(),
10552 Vec::new(),
10553 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
10554 );
10555 assert_eq!(
10556 project_events.lock().as_slice(),
10557 Vec::new(),
10558 "No further project events should happen, as only ignored dirs received FS events",
10559 );
10560}
10561
// A repository discovered for a visible worktree must remain the only
// repository even after an invisible single-file worktree is added whose
// ancestor directories contain another `.git` directory.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // Two nested repos: /root/dir1 and /root/dir1/dep1; only dep1 is opened.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only dep1's repository should be known at this point.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add `b.txt` as an invisible (non-visible) worktree; its containing repo
    // at /root/dir1 must NOT be picked up.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
10623
// Checks git/ignore state after a rescan: files covered only by an ancestor
// `.gitignore` (outside the repo root) get no status, files added to the index
// show as Added, and files under a repo-ignored directory stay ignored with no
// status — including after new files are created post-scan.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so the `.git` directory itself appears as a
    // (ignored) worktree entry, asserted at the end.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries into the worktree snapshot.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        // Committed & unmodified: no status, not ignored.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        // Covered only by the ancestor .gitignore outside the repo root.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        // Covered by the repo's own .gitignore.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create one newly-indexed file, one ancestor-ignored file, and one
    // repo-ignored file, then re-check state after the rescan settles.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // In the index but not in HEAD -> Added.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // `.git` itself is present (exclusions disabled above) and ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10764
// Ensures repositories are discovered for linked git worktrees (a `.git` file
// containing `gitdir:` pointing into `<main>/.git/worktrees/...`) and for
// submodules (`gitdir:` pointing into `<main>/.git/modules/...`), and that git
// state changes in each are routed to the correct repository.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three work directories should be discovered: the main repo, the
    // linked worktree, and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    // HEAD/index get "b" while the file on disk has "B", so b.txt becomes
    // worktree-modified.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must be attributed to the linked worktree's repository, not
    // the main one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
10920
// Two project worktrees that both live under the same repository root must be
// deduplicated into a single repository entry.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repo as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository, rooted at the shared parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
10967
// Saving a buffer under a new path must retarget its diffs: the unstaged diff
// base switches to the new file's index contents, and the uncommitted diff
// base to the new file's HEAD contents (driven by the `BufferChangedFilePath`
// event).
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct contents for HEAD, index, and buffer so each diff base is
    // unambiguous in the assertions.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Dirty the buffer so the post-save diff has hunks to assert on.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // The uncommitted diff, opened after the rename, must compare against
    // file_2's HEAD contents.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11081
11082async fn search(
11083 project: &Entity<Project>,
11084 query: SearchQuery,
11085 cx: &mut gpui::TestAppContext,
11086) -> Result<HashMap<String, Vec<Range<usize>>>> {
11087 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11088 let mut results = HashMap::default();
11089 while let Ok(search_result) = search_rx.rx.recv().await {
11090 match search_result {
11091 SearchResult::Buffer { buffer, ranges } => {
11092 results.entry(buffer).or_insert(ranges);
11093 }
11094 SearchResult::LimitReached => {}
11095 }
11096 }
11097 Ok(results
11098 .into_iter()
11099 .map(|(buffer, ranges)| {
11100 buffer.update(cx, |buffer, cx| {
11101 let path = buffer
11102 .file()
11103 .unwrap()
11104 .full_path(cx)
11105 .to_string_lossy()
11106 .to_string();
11107 let ranges = ranges
11108 .into_iter()
11109 .map(|range| range.to_offset(buffer))
11110 .collect::<Vec<_>>();
11111 (path, ranges)
11112 })
11113 })
11114 .collect())
11115}
11116
// Reloading a buffer with an explicit encoding must be undoable: undo restores
// the UTF-8 interpretation, redo re-applies UTF-16LE, and the buffer stays
// clean (not dirty) throughout since every state matches a disk read.
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    // Baseline: detected as UTF-8, text "Hi", clean.
    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    // 0x48 0x69 little-endian is U+6948.
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    // Redo restores the UTF-16LE interpretation.
    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11180
11181pub fn init_test(cx: &mut gpui::TestAppContext) {
11182 zlog::init_test();
11183
11184 cx.update(|cx| {
11185 let settings_store = SettingsStore::test(cx);
11186 cx.set_global(settings_store);
11187 release_channel::init(semver::Version::new(0, 0, 0), cx);
11188 });
11189}
11190
11191fn json_lang() -> Arc<Language> {
11192 Arc::new(Language::new(
11193 LanguageConfig {
11194 name: "JSON".into(),
11195 matcher: LanguageMatcher {
11196 path_suffixes: vec!["json".to_string()],
11197 ..Default::default()
11198 },
11199 ..Default::default()
11200 },
11201 None,
11202 ))
11203}
11204
11205fn js_lang() -> Arc<Language> {
11206 Arc::new(Language::new(
11207 LanguageConfig {
11208 name: "JavaScript".into(),
11209 matcher: LanguageMatcher {
11210 path_suffixes: vec!["js".to_string()],
11211 ..Default::default()
11212 },
11213 ..Default::default()
11214 },
11215 None,
11216 ))
11217}
11218
// Python language fixture whose toolchain lister synthesizes a "Python Venv"
// toolchain for every ancestor directory (of the queried subroot path) that
// contains a `.venv` directory on the provided fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is not exercised by these tests.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for the fake toolchains.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
11293
11294fn typescript_lang() -> Arc<Language> {
11295 Arc::new(Language::new(
11296 LanguageConfig {
11297 name: "TypeScript".into(),
11298 matcher: LanguageMatcher {
11299 path_suffixes: vec!["ts".to_string()],
11300 ..Default::default()
11301 },
11302 ..Default::default()
11303 },
11304 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
11305 ))
11306}
11307
11308fn tsx_lang() -> Arc<Language> {
11309 Arc::new(Language::new(
11310 LanguageConfig {
11311 name: "tsx".into(),
11312 matcher: LanguageMatcher {
11313 path_suffixes: vec!["tsx".to_string()],
11314 ..Default::default()
11315 },
11316 ..Default::default()
11317 },
11318 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
11319 ))
11320}
11321
11322fn get_all_tasks(
11323 project: &Entity<Project>,
11324 task_contexts: Arc<TaskContexts>,
11325 cx: &mut App,
11326) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
11327 let new_tasks = project.update(cx, |project, cx| {
11328 project.task_store().update(cx, |task_store, cx| {
11329 task_store.task_inventory().unwrap().update(cx, |this, cx| {
11330 this.used_and_current_resolved_tasks(task_contexts, cx)
11331 })
11332 })
11333 });
11334
11335 cx.background_spawn(async move {
11336 let (mut old, new) = new_tasks.await;
11337 old.extend(new);
11338 old
11339 })
11340}
11341
11342#[track_caller]
11343fn assert_entry_git_state(
11344 tree: &Worktree,
11345 repository: &Repository,
11346 path: &str,
11347 index_status: Option<StatusCode>,
11348 is_ignored: bool,
11349) {
11350 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
11351 let entry = tree
11352 .entry_for_path(&rel_path(path))
11353 .unwrap_or_else(|| panic!("entry {path} not found"));
11354 let status = repository
11355 .status_for_path(&repo_path(path))
11356 .map(|entry| entry.status);
11357 let expected = index_status.map(|index_status| {
11358 TrackedStatus {
11359 index_status,
11360 worktree_status: StatusCode::Unmodified,
11361 }
11362 .into()
11363 });
11364 assert_eq!(
11365 status, expected,
11366 "expected {path} to have git status: {expected:?}"
11367 );
11368 assert_eq!(
11369 entry.is_ignored, is_ignored,
11370 "expected {path} to have is_ignored: {is_ignored}"
11371 );
11372}
11373
11374#[track_caller]
11375fn git_init(path: &Path) -> git2::Repository {
11376 let mut init_opts = RepositoryInitOptions::new();
11377 init_opts.initial_head("main");
11378 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
11379}
11380
11381#[track_caller]
11382fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
11383 let path = path.as_ref();
11384 let mut index = repo.index().expect("Failed to get index");
11385 index.add_path(path).expect("Failed to add file");
11386 index.write().expect("Failed to write index");
11387}
11388
11389#[track_caller]
11390fn git_remove_index(path: &Path, repo: &git2::Repository) {
11391 let mut index = repo.index().expect("Failed to get index");
11392 index.remove_path(path).expect("Failed to add file");
11393 index.write().expect("Failed to write index");
11394}
11395
11396#[track_caller]
11397fn git_commit(msg: &'static str, repo: &git2::Repository) {
11398 use git2::Signature;
11399
11400 let signature = Signature::now("test", "test@zed.dev").unwrap();
11401 let oid = repo.index().unwrap().write_tree().unwrap();
11402 let tree = repo.find_tree(oid).unwrap();
11403 if let Ok(head) = repo.head() {
11404 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
11405
11406 let parent_commit = parent_obj.as_commit().unwrap();
11407
11408 repo.commit(
11409 Some("HEAD"),
11410 &signature,
11411 &signature,
11412 msg,
11413 &tree,
11414 &[parent_commit],
11415 )
11416 .expect("Failed to commit with parent");
11417 } else {
11418 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
11419 .expect("Failed to commit");
11420 }
11421}
11422
// Currently unused (compiled out via `cfg(any())`), kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    // Apply the given commit onto the current HEAD with default options.
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
11428
11429#[track_caller]
11430fn git_stash(repo: &mut git2::Repository) {
11431 use git2::Signature;
11432
11433 let signature = Signature::now("test", "test@zed.dev").unwrap();
11434 repo.stash_save(&signature, "N/A", None)
11435 .expect("Failed to stash");
11436}
11437
11438#[track_caller]
11439fn git_reset(offset: usize, repo: &git2::Repository) {
11440 let head = repo.head().expect("Couldn't get repo head");
11441 let object = head.peel(git2::ObjectType::Commit).unwrap();
11442 let commit = object.as_commit().unwrap();
11443 let new_head = commit
11444 .parents()
11445 .inspect(|parnet| {
11446 parnet.message();
11447 })
11448 .nth(offset)
11449 .expect("Not enough history");
11450 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
11451 .expect("Could not reset");
11452}
11453
// Currently unused (compiled out via `cfg(any())`), kept for future tests.
/// Creates a new branch named `name` pointing at the current HEAD commit.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // The failure message previously said "Failed to commit" — copy-pasted
    // from `git_commit`; this call creates a branch.
    repo.branch(name, &head, false).expect("Failed to create branch");
}
11464
// Currently unused (compiled out via `cfg(any())`), kept for future tests.
/// Points HEAD at the reference `name` and checks it out into the working
/// tree with default options.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
11471
// Currently unused (compiled out via `cfg(any())`), kept for future tests.
/// Snapshots the repository's status as a map from path to `git2::Status`.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    let mut result = collections::HashMap::default();
    for entry in statuses.iter() {
        result.insert(entry.path().unwrap().to_string(), entry.status());
    }
    result
}
11481
/// Verifies that `Project::find_project_path` resolves absolute paths to the
/// correct worktree and worktree-relative path, across multiple worktrees.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    // Two sibling directories under a common root; each becomes its own worktree.
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute path and id so the assertions below can
    // verify that lookups land in the right worktree.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at a worktree root resolves to that worktree with a bare path.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // A nested file resolves with its full worktree-relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // A file in the second worktree resolves to the second worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path inside a worktree resolves even when no such file exists yet.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
11565
/// Verifies that removing worktrees updates the set of tracked git
/// repositories and re-selects (or clears) the active repository.
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two repositories: `/root/a` and `/root/b`. `/root/b/script` is opened
    // as its own worktree but lives inside the `b` repository.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    // Index worktree ids by absolute path so each can be removed by path below.
    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Three worktrees, but only two distinct repositories (a and b).
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the `script` worktree must not drop the `b` repository, which
    // is still covered by the `/root/b` worktree.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing the worktree of the active repository should make the
    // remaining repository active.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the last worktree leaves no active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
11678
/// Verifies the optimistic-staging lifecycle of a diff hunk: unstaged →
/// pending removal (optimistic, while the stage job runs) → staged → gone
/// after HEAD is updated to match the file.
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD contains "two"; the working copy has "TWO", producing one modified hunk.
    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        // Keep ticking while the hunk is still fully unstaged; stop as soon as
        // it flips to the optimistic pending state. Jumping straight to
        // NoSecondaryHunk would mean the optimistic state was skipped.
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // Mid-staging: the hunk shows the optimistic "removal pending" state.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
11823
/// Verifies that buffers whose paths match the `read_only_files` glob
/// patterns open as read-only, while non-matching files stay writable.
#[gpui::test]
async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Configure read_only_files setting
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/generated/**".to_string(),
                    "**/*.gen.rs".to_string(),
                ]);
            });
        });
    });

    // One file matches neither pattern, one matches "**/*.gen.rs", and one
    // sits inside a directory matched by "**/generated/**".
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
                "types.gen.rs": "// Generated file",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Open a regular file - should be read-write
    let regular_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    regular_buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Regular file should not be read-only");
    });

    // Open a file matching *.gen.rs pattern - should be read-only
    let gen_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
        })
        .await
        .unwrap();

    gen_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File matching *.gen.rs pattern should be read-only"
        );
    });

    // Open a file in generated directory - should be read-only
    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File in generated directory should be read-only"
        );
    });
}
11899
/// Verifies that with an explicitly empty `read_only_files` list, no buffer
/// opens as read-only — not even files under `generated/`.
#[gpui::test]
async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Explicitly set read_only_files to empty (default behavior)
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // All files should be read-write when read_only_files is empty
    let main_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    main_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Files should not be read-only when read_only_files is empty"
        );
    });

    // Even a file under `generated/` is writable without a matching pattern.
    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Generated files should not be read-only when read_only_files is empty"
        );
    });
}
11958
/// Verifies `read_only_files` patterns targeting lock files: `Cargo.lock`
/// and `package-lock.json` open read-only, while their sibling manifests
/// (`Cargo.toml`, `package.json`) stay writable.
#[gpui::test]
async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Configure to make lock files read-only
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/*.lock".to_string(),
                    "**/package-lock.json".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "Cargo.lock": "# Lock file",
            "Cargo.toml": "[package]",
            "package-lock.json": "{}",
            "package.json": "{}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Cargo.lock should be read-only
    let cargo_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.lock"), cx)
        })
        .await
        .unwrap();

    cargo_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "Cargo.lock should be read-only");
    });

    // Cargo.toml should be read-write
    let cargo_toml = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    cargo_toml.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
    });

    // package-lock.json should be read-only
    let package_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package-lock.json"), cx)
        })
        .await
        .unwrap();

    package_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "package-lock.json should be read-only");
    });

    // package.json should be read-write
    let package_json = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package.json"), cx)
        })
        .await
        .unwrap();

    package_json.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "package.json should not be read-only");
    });
}
12037
12038mod disable_ai_settings_tests {
12039 use gpui::TestAppContext;
12040 use project::*;
12041 use settings::{Settings, SettingsStore};
12042
12043 #[gpui::test]
12044 async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
12045 cx.update(|cx| {
12046 settings::init(cx);
12047
12048 // Test 1: Default is false (AI enabled)
12049 assert!(
12050 !DisableAiSettings::get_global(cx).disable_ai,
12051 "Default should allow AI"
12052 );
12053 });
12054
12055 let disable_true = serde_json::json!({
12056 "disable_ai": true
12057 })
12058 .to_string();
12059 let disable_false = serde_json::json!({
12060 "disable_ai": false
12061 })
12062 .to_string();
12063
12064 cx.update_global::<SettingsStore, _>(|store, cx| {
12065 store.set_user_settings(&disable_false, cx).unwrap();
12066 store.set_global_settings(&disable_true, cx).unwrap();
12067 });
12068 cx.update(|cx| {
12069 assert!(
12070 DisableAiSettings::get_global(cx).disable_ai,
12071 "Local false cannot override global true"
12072 );
12073 });
12074
12075 cx.update_global::<SettingsStore, _>(|store, cx| {
12076 store.set_global_settings(&disable_false, cx).unwrap();
12077 store.set_user_settings(&disable_true, cx).unwrap();
12078 });
12079
12080 cx.update(|cx| {
12081 assert!(
12082 DisableAiSettings::get_global(cx).disable_ai,
12083 "Local false cannot override global true"
12084 );
12085 });
12086 }
12087}