1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::{FakeFs, PathEventKind};
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
45 LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
46 ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
47 language_settings::{LanguageSettingsContent, language_settings},
48 markdown_lang, rust_lang, tree_sitter_typescript,
49};
50use lsp::{
51 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
52 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
53 Uri, WillRenameFiles, notification::DidRenameFiles,
54};
55use parking_lot::Mutex;
56use paths::{config_dir, global_gitignore_path, tasks_file};
57use postage::stream::Stream as _;
58use pretty_assertions::{assert_eq, assert_matches};
59use project::{
60 Event, TaskContexts,
61 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
62 search::{SearchQuery, SearchResult},
63 task_store::{TaskSettingsLocation, TaskStore},
64 *,
65};
66use rand::{Rng as _, rngs::StdRng};
67use serde_json::json;
68use settings::SettingsStore;
69#[cfg(not(windows))]
70use std::os;
71use std::{
72 cell::RefCell,
73 env, mem,
74 num::NonZeroU32,
75 ops::Range,
76 path::{Path, PathBuf},
77 rc::Rc,
78 str::FromStr,
79 sync::{Arc, OnceLock},
80 task::Poll,
81 time::Duration,
82};
83use sum_tree::SumTree;
84use task::{ResolvedTask, ShellKind, TaskContext};
85use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
86use unindent::Unindent as _;
87use util::{
88 TryFutureExt as _, assert_set_eq, maybe, path,
89 paths::{PathMatcher, PathStyle},
90 rel_path::{RelPath, rel_path},
91 test::{TempTree, marked_text_offsets},
92 uri,
93};
94use worktree::WorktreeModelHandle as _;
95
96#[gpui::test]
97async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
98 cx.executor().allow_parking();
99
100 let (tx, mut rx) = futures::channel::mpsc::unbounded();
101 let _thread = std::thread::spawn(move || {
102 #[cfg(not(target_os = "windows"))]
103 std::fs::metadata("/tmp").unwrap();
104 #[cfg(target_os = "windows")]
105 std::fs::metadata("C:/Windows").unwrap();
106 std::thread::sleep(Duration::from_millis(1000));
107 tx.unbounded_send(1).unwrap();
108 });
109 rx.next().await.unwrap();
110}
111
112#[gpui::test]
113async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
114 cx.executor().allow_parking();
115
116 let io_task = smol::unblock(move || {
117 println!("sleeping on thread {:?}", std::thread::current().id());
118 std::thread::sleep(Duration::from_millis(10));
119 1
120 });
121
122 let task = cx.foreground_executor().spawn(async move {
123 io_task.await;
124 });
125
126 task.await;
127}
128
129// NOTE:
130// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
131// we assume that they are not supported out of the box.
132#[cfg(not(windows))]
133#[gpui::test]
134async fn test_symlinks(cx: &mut gpui::TestAppContext) {
135 init_test(cx);
136 cx.executor().allow_parking();
137
138 let dir = TempTree::new(json!({
139 "root": {
140 "apple": "",
141 "banana": {
142 "carrot": {
143 "date": "",
144 "endive": "",
145 }
146 },
147 "fennel": {
148 "grape": "",
149 }
150 }
151 }));
152
153 let root_link_path = dir.path().join("root_link");
154 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
155 os::unix::fs::symlink(
156 dir.path().join("root/fennel"),
157 dir.path().join("root/finnochio"),
158 )
159 .unwrap();
160
161 let project = Project::test(
162 Arc::new(RealFs::new(None, cx.executor())),
163 [root_link_path.as_ref()],
164 cx,
165 )
166 .await;
167
168 project.update(cx, |project, cx| {
169 let tree = project.worktrees(cx).next().unwrap().read(cx);
170 assert_eq!(tree.file_count(), 5);
171 assert_eq!(
172 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
173 tree.entry_for_path(rel_path("finnochio/grape"))
174 .unwrap()
175 .inode
176 );
177 });
178}
179
180#[gpui::test]
181async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
182 init_test(cx);
183
184 let dir = TempTree::new(json!({
185 ".editorconfig": r#"
186 root = true
187 [*.rs]
188 indent_style = tab
189 indent_size = 3
190 end_of_line = lf
191 insert_final_newline = true
192 trim_trailing_whitespace = true
193 max_line_length = 120
194 [*.js]
195 tab_width = 10
196 max_line_length = off
197 "#,
198 ".zed": {
199 "settings.json": r#"{
200 "tab_size": 8,
201 "hard_tabs": false,
202 "ensure_final_newline_on_save": false,
203 "remove_trailing_whitespace_on_save": false,
204 "preferred_line_length": 64,
205 "soft_wrap": "editor_width",
206 }"#,
207 },
208 "a.rs": "fn a() {\n A\n}",
209 "b": {
210 ".editorconfig": r#"
211 [*.rs]
212 indent_size = 2
213 max_line_length = off,
214 "#,
215 "b.rs": "fn b() {\n B\n}",
216 },
217 "c.js": "def c\n C\nend",
218 "d": {
219 ".editorconfig": r#"
220 [*.rs]
221 indent_size = 1
222 "#,
223 "d.rs": "fn d() {\n D\n}",
224 },
225 "README.json": "tabs are better\n",
226 }));
227
228 let path = dir.path();
229 let fs = FakeFs::new(cx.executor());
230 fs.insert_tree_from_real_fs(path, path).await;
231 let project = Project::test(fs, [path], cx).await;
232
233 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
234 language_registry.add(js_lang());
235 language_registry.add(json_lang());
236 language_registry.add(rust_lang());
237
238 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
239
240 cx.executor().run_until_parked();
241
242 cx.update(|cx| {
243 let tree = worktree.read(cx);
244 let settings_for = |path: &str| {
245 let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
246 let file = File::for_entry(file_entry, worktree.clone());
247 let file_language = project
248 .read(cx)
249 .languages()
250 .load_language_for_file_path(file.path.as_std_path());
251 let file_language = cx
252 .foreground_executor()
253 .block_on(file_language)
254 .expect("Failed to get file language");
255 let file = file as _;
256 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
257 };
258
259 let settings_a = settings_for("a.rs");
260 let settings_b = settings_for("b/b.rs");
261 let settings_c = settings_for("c.js");
262 let settings_d = settings_for("d/d.rs");
263 let settings_readme = settings_for("README.json");
264
265 // .editorconfig overrides .zed/settings
266 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
267 assert_eq!(settings_a.hard_tabs, true);
268 assert_eq!(settings_a.ensure_final_newline_on_save, true);
269 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
270 assert_eq!(settings_a.preferred_line_length, 120);
271
272 // .editorconfig in subdirectory overrides .editorconfig in root
273 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
274 assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));
275
276 // "indent_size" is not set, so "tab_width" is used
277 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
278
279 // When max_line_length is "off", default to .zed/settings.json
280 assert_eq!(settings_b.preferred_line_length, 64);
281 assert_eq!(settings_c.preferred_line_length, 64);
282
283 // README.md should not be affected by .editorconfig's globe "*.rs"
284 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
285 });
286}
287
288#[gpui::test]
289async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
290 init_test(cx);
291
292 let fs = FakeFs::new(cx.executor());
293 fs.insert_tree(
294 path!("/grandparent"),
295 json!({
296 ".editorconfig": "[*]\nindent_size = 4\n",
297 "parent": {
298 ".editorconfig": "[*.rs]\nindent_size = 2\n",
299 "worktree": {
300 ".editorconfig": "[*.md]\nindent_size = 3\n",
301 "main.rs": "fn main() {}",
302 "README.md": "# README",
303 "other.txt": "other content",
304 }
305 }
306 }),
307 )
308 .await;
309
310 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
311
312 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
313 language_registry.add(rust_lang());
314 language_registry.add(markdown_lang());
315
316 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
317
318 cx.executor().run_until_parked();
319
320 cx.update(|cx| {
321 let tree = worktree.read(cx);
322 let settings_for = |path: &str| {
323 let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
324 let file = File::for_entry(file_entry, worktree.clone());
325 let file_language = project
326 .read(cx)
327 .languages()
328 .load_language_for_file_path(file.path.as_std_path());
329 let file_language = cx
330 .foreground_executor()
331 .block_on(file_language)
332 .expect("Failed to get file language");
333 let file = file as _;
334 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
335 };
336
337 let settings_rs = settings_for("main.rs");
338 let settings_md = settings_for("README.md");
339 let settings_txt = settings_for("other.txt");
340
341 // main.rs gets indent_size = 2 from parent's external .editorconfig
342 assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));
343
344 // README.md gets indent_size = 3 from internal worktree .editorconfig
345 assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));
346
347 // other.txt gets indent_size = 4 from grandparent's external .editorconfig
348 assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
349 });
350}
351
352#[gpui::test]
353async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
354 init_test(cx);
355
356 let fs = FakeFs::new(cx.executor());
357 fs.insert_tree(
358 path!("/worktree"),
359 json!({
360 ".editorconfig": "[*]\nindent_size = 99\n",
361 "src": {
362 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
363 "file.rs": "fn main() {}",
364 }
365 }),
366 )
367 .await;
368
369 let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;
370
371 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
372 language_registry.add(rust_lang());
373
374 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
375
376 cx.executor().run_until_parked();
377
378 cx.update(|cx| {
379 let tree = worktree.read(cx);
380 let file_entry = tree
381 .entry_for_path(rel_path("src/file.rs"))
382 .unwrap()
383 .clone();
384 let file = File::for_entry(file_entry, worktree.clone());
385 let file_language = project
386 .read(cx)
387 .languages()
388 .load_language_for_file_path(file.path.as_std_path());
389 let file_language = cx
390 .foreground_executor()
391 .block_on(file_language)
392 .expect("Failed to get file language");
393 let file = file as _;
394 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
395
396 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
397 });
398}
399
400#[gpui::test]
401async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
402 init_test(cx);
403
404 let fs = FakeFs::new(cx.executor());
405 fs.insert_tree(
406 path!("/parent"),
407 json!({
408 ".editorconfig": "[*]\nindent_size = 99\n",
409 "worktree": {
410 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
411 "file.rs": "fn main() {}",
412 }
413 }),
414 )
415 .await;
416
417 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
418
419 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
420 language_registry.add(rust_lang());
421
422 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
423
424 cx.executor().run_until_parked();
425
426 cx.update(|cx| {
427 let tree = worktree.read(cx);
428 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
429 let file = File::for_entry(file_entry, worktree.clone());
430 let file_language = project
431 .read(cx)
432 .languages()
433 .load_language_for_file_path(file.path.as_std_path());
434 let file_language = cx
435 .foreground_executor()
436 .block_on(file_language)
437 .expect("Failed to get file language");
438 let file = file as _;
439 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
440
441 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
442 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
443 });
444}
445
446#[gpui::test]
447async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
448 init_test(cx);
449
450 let fs = FakeFs::new(cx.executor());
451 fs.insert_tree(
452 path!("/grandparent"),
453 json!({
454 ".editorconfig": "[*]\nindent_size = 99\n",
455 "parent": {
456 ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
457 "worktree": {
458 "file.rs": "fn main() {}",
459 }
460 }
461 }),
462 )
463 .await;
464
465 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
466
467 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
468 language_registry.add(rust_lang());
469
470 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
471
472 cx.executor().run_until_parked();
473
474 cx.update(|cx| {
475 let tree = worktree.read(cx);
476 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
477 let file = File::for_entry(file_entry, worktree.clone());
478 let file_language = project
479 .read(cx)
480 .languages()
481 .load_language_for_file_path(file.path.as_std_path());
482 let file_language = cx
483 .foreground_executor()
484 .block_on(file_language)
485 .expect("Failed to get file language");
486 let file = file as _;
487 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
488
489 // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
490 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
491 });
492}
493
494#[gpui::test]
495async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
496 init_test(cx);
497
498 let fs = FakeFs::new(cx.executor());
499 fs.insert_tree(
500 path!("/parent"),
501 json!({
502 ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
503 "worktree_a": {
504 "file.rs": "fn a() {}",
505 ".editorconfig": "[*]\ninsert_final_newline = true\n",
506 },
507 "worktree_b": {
508 "file.rs": "fn b() {}",
509 ".editorconfig": "[*]\ninsert_final_newline = false\n",
510 }
511 }),
512 )
513 .await;
514
515 let project = Project::test(
516 fs,
517 [
518 path!("/parent/worktree_a").as_ref(),
519 path!("/parent/worktree_b").as_ref(),
520 ],
521 cx,
522 )
523 .await;
524
525 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
526 language_registry.add(rust_lang());
527
528 cx.executor().run_until_parked();
529
530 cx.update(|cx| {
531 let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
532 assert_eq!(worktrees.len(), 2);
533
534 for worktree in worktrees {
535 let tree = worktree.read(cx);
536 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
537 let file = File::for_entry(file_entry, worktree.clone());
538 let file_language = project
539 .read(cx)
540 .languages()
541 .load_language_for_file_path(file.path.as_std_path());
542 let file_language = cx
543 .foreground_executor()
544 .block_on(file_language)
545 .expect("Failed to get file language");
546 let file = file as _;
547 let settings =
548 language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
549
550 // Both worktrees should get indent_size = 5 from shared parent .editorconfig
551 assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
552 }
553 });
554}
555
556#[gpui::test]
557async fn test_external_editorconfig_not_loaded_without_internal_config(
558 cx: &mut gpui::TestAppContext,
559) {
560 init_test(cx);
561
562 let fs = FakeFs::new(cx.executor());
563 fs.insert_tree(
564 path!("/parent"),
565 json!({
566 ".editorconfig": "[*]\nindent_size = 99\n",
567 "worktree": {
568 "file.rs": "fn main() {}",
569 }
570 }),
571 )
572 .await;
573
574 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
575
576 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
577 language_registry.add(rust_lang());
578
579 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
580
581 cx.executor().run_until_parked();
582
583 cx.update(|cx| {
584 let tree = worktree.read(cx);
585 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
586 let file = File::for_entry(file_entry, worktree.clone());
587 let file_language = project
588 .read(cx)
589 .languages()
590 .load_language_for_file_path(file.path.as_std_path());
591 let file_language = cx
592 .foreground_executor()
593 .block_on(file_language)
594 .expect("Failed to get file language");
595 let file = file as _;
596 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
597
598 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
599 // because without an internal .editorconfig, external configs are not loaded
600 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
601 });
602}
603
604#[gpui::test]
605async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
606 init_test(cx);
607
608 let fs = FakeFs::new(cx.executor());
609 fs.insert_tree(
610 path!("/parent"),
611 json!({
612 ".editorconfig": "[*]\nindent_size = 4\n",
613 "worktree": {
614 ".editorconfig": "[*]\n",
615 "file.rs": "fn main() {}",
616 }
617 }),
618 )
619 .await;
620
621 let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;
622
623 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
624 language_registry.add(rust_lang());
625
626 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
627
628 cx.executor().run_until_parked();
629
630 cx.update(|cx| {
631 let tree = worktree.read(cx);
632 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
633 let file = File::for_entry(file_entry, worktree.clone());
634 let file_language = project
635 .read(cx)
636 .languages()
637 .load_language_for_file_path(file.path.as_std_path());
638 let file_language = cx
639 .foreground_executor()
640 .block_on(file_language)
641 .expect("Failed to get file language");
642 let file = file as _;
643 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
644
645 // Test initial settings: tab_size = 4 from parent's external .editorconfig
646 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
647 });
648
649 fs.atomic_write(
650 PathBuf::from(path!("/parent/.editorconfig")),
651 "[*]\nindent_size = 8\n".to_owned(),
652 )
653 .await
654 .unwrap();
655
656 cx.executor().run_until_parked();
657
658 cx.update(|cx| {
659 let tree = worktree.read(cx);
660 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
661 let file = File::for_entry(file_entry, worktree.clone());
662 let file_language = project
663 .read(cx)
664 .languages()
665 .load_language_for_file_path(file.path.as_std_path());
666 let file_language = cx
667 .foreground_executor()
668 .block_on(file_language)
669 .expect("Failed to get file language");
670 let file = file as _;
671 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
672
673 // Test settings updated: tab_size = 8
674 assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
675 });
676}
677
678#[gpui::test]
679async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
680 init_test(cx);
681
682 let fs = FakeFs::new(cx.executor());
683 fs.insert_tree(
684 path!("/parent"),
685 json!({
686 ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
687 "existing_worktree": {
688 ".editorconfig": "[*]\n",
689 "file.rs": "fn a() {}",
690 },
691 "new_worktree": {
692 ".editorconfig": "[*]\n",
693 "file.rs": "fn b() {}",
694 }
695 }),
696 )
697 .await;
698
699 let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;
700
701 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
702 language_registry.add(rust_lang());
703
704 cx.executor().run_until_parked();
705
706 cx.update(|cx| {
707 let worktree = project.read(cx).worktrees(cx).next().unwrap();
708 let tree = worktree.read(cx);
709 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
710 let file = File::for_entry(file_entry, worktree.clone());
711 let file_language = project
712 .read(cx)
713 .languages()
714 .load_language_for_file_path(file.path.as_std_path());
715 let file_language = cx
716 .foreground_executor()
717 .block_on(file_language)
718 .expect("Failed to get file language");
719 let file = file as _;
720 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
721
722 // Test existing worktree has tab_size = 7
723 assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
724 });
725
726 let (new_worktree, _) = project
727 .update(cx, |project, cx| {
728 project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
729 })
730 .await
731 .unwrap();
732
733 cx.executor().run_until_parked();
734
735 cx.update(|cx| {
736 let tree = new_worktree.read(cx);
737 let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
738 let file = File::for_entry(file_entry, new_worktree.clone());
739 let file_language = project
740 .read(cx)
741 .languages()
742 .load_language_for_file_path(file.path.as_std_path());
743 let file_language = cx
744 .foreground_executor()
745 .block_on(file_language)
746 .expect("Failed to get file language");
747 let file = file as _;
748 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
749
750 // Verify new worktree also has tab_size = 7 from shared parent editorconfig
751 assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
752 });
753}
754
755#[gpui::test]
756async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
757 init_test(cx);
758
759 let fs = FakeFs::new(cx.executor());
760 fs.insert_tree(
761 path!("/parent"),
762 json!({
763 ".editorconfig": "[*]\nindent_size = 6\n",
764 "worktree": {
765 ".editorconfig": "[*]\n",
766 "file.rs": "fn main() {}",
767 }
768 }),
769 )
770 .await;
771
772 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
773
774 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
775 language_registry.add(rust_lang());
776
777 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
778 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
779
780 cx.executor().run_until_parked();
781
782 cx.update(|cx| {
783 let store = cx.global::<SettingsStore>();
784 let (worktree_ids, external_paths, watcher_paths) =
785 store.editorconfig_store.read(cx).test_state();
786
787 // Test external config is loaded
788 assert!(worktree_ids.contains(&worktree_id));
789 assert!(!external_paths.is_empty());
790 assert!(!watcher_paths.is_empty());
791 });
792
793 project.update(cx, |project, cx| {
794 project.remove_worktree(worktree_id, cx);
795 });
796
797 cx.executor().run_until_parked();
798
799 cx.update(|cx| {
800 let store = cx.global::<SettingsStore>();
801 let (worktree_ids, external_paths, watcher_paths) =
802 store.editorconfig_store.read(cx).test_state();
803
804 // Test worktree state, external configs, and watchers all removed
805 assert!(!worktree_ids.contains(&worktree_id));
806 assert!(external_paths.is_empty());
807 assert!(watcher_paths.is_empty());
808 });
809}
810
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    // Two worktrees share a single external .editorconfig in their common
    // parent directory. Removing one worktree must NOT drop the shared
    // config or its watcher while the other worktree still references it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    // Capture both worktree ids, keeping a handle to worktree_b so its
    // settings can still be resolved after worktree_a is removed.
    // NOTE(review): assumes `worktrees(cx)` yields the worktrees in the
    // order they were passed to `Project::test` — confirm if this flakes.
    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    // Before removal: both worktrees registered, one shared external config.
    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    // After removal: worktree_a's state is cleaned up, but the shared
    // external config and its watcher survive for worktree_b.
    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    // Finally, resolve settings through worktree_b to prove the shared
    // config still takes effect end-to-end.
    cx.update(|cx| {
        let tree = worktree_b.read(cx);
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree_b.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
908
909#[gpui::test]
910async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
911 init_test(cx);
912 cx.update(|cx| {
913 GitHostingProviderRegistry::default_global(cx);
914 git_hosting_providers::init(cx);
915 });
916
917 let fs = FakeFs::new(cx.executor());
918 let str_path = path!("/dir");
919 let path = Path::new(str_path);
920
921 fs.insert_tree(
922 path!("/dir"),
923 json!({
924 ".zed": {
925 "settings.json": r#"{
926 "git_hosting_providers": [
927 {
928 "provider": "gitlab",
929 "base_url": "https://google.com",
930 "name": "foo"
931 }
932 ]
933 }"#
934 },
935 }),
936 )
937 .await;
938
939 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
940 let (_worktree, _) =
941 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
942 cx.executor().run_until_parked();
943
944 cx.update(|cx| {
945 let provider = GitHostingProviderRegistry::global(cx);
946 assert!(
947 provider
948 .list_hosting_providers()
949 .into_iter()
950 .any(|provider| provider.name() == "foo")
951 );
952 });
953
954 fs.atomic_write(
955 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
956 "{}".into(),
957 )
958 .await
959 .unwrap();
960
961 cx.run_until_parked();
962
963 cx.update(|cx| {
964 let provider = GitHostingProviderRegistry::global(cx);
965 assert!(
966 !provider
967 .list_hosting_providers()
968 .into_iter()
969 .any(|provider| provider.name() == "foo")
970 );
971 });
972}
973
974#[gpui::test]
975async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
976 init_test(cx);
977 TaskStore::init(None);
978
979 let fs = FakeFs::new(cx.executor());
980 fs.insert_tree(
981 path!("/dir"),
982 json!({
983 ".zed": {
984 "settings.json": r#"{ "tab_size": 8 }"#,
985 "tasks.json": r#"[{
986 "label": "cargo check all",
987 "command": "cargo",
988 "args": ["check", "--all"]
989 },]"#,
990 },
991 "a": {
992 "a.rs": "fn a() {\n A\n}"
993 },
994 "b": {
995 ".zed": {
996 "settings.json": r#"{ "tab_size": 2 }"#,
997 "tasks.json": r#"[{
998 "label": "cargo check",
999 "command": "cargo",
1000 "args": ["check"]
1001 },]"#,
1002 },
1003 "b.rs": "fn b() {\n B\n}"
1004 }
1005 }),
1006 )
1007 .await;
1008
1009 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1010 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1011
1012 cx.executor().run_until_parked();
1013 let worktree_id = cx.update(|cx| {
1014 project.update(cx, |project, cx| {
1015 project.worktrees(cx).next().unwrap().read(cx).id()
1016 })
1017 });
1018
1019 let mut task_contexts = TaskContexts::default();
1020 task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
1021 let task_contexts = Arc::new(task_contexts);
1022
1023 let topmost_local_task_source_kind = TaskSourceKind::Worktree {
1024 id: worktree_id,
1025 directory_in_worktree: rel_path(".zed").into(),
1026 id_base: "local worktree tasks from directory \".zed\"".into(),
1027 };
1028
1029 let all_tasks = cx
1030 .update(|cx| {
1031 let tree = worktree.read(cx);
1032
1033 let file_a = File::for_entry(
1034 tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
1035 worktree.clone(),
1036 ) as _;
1037 let settings_a = language_settings(None, Some(&file_a), cx);
1038 let file_b = File::for_entry(
1039 tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
1040 worktree.clone(),
1041 ) as _;
1042 let settings_b = language_settings(None, Some(&file_b), cx);
1043
1044 assert_eq!(settings_a.tab_size.get(), 8);
1045 assert_eq!(settings_b.tab_size.get(), 2);
1046
1047 get_all_tasks(&project, task_contexts.clone(), cx)
1048 })
1049 .await
1050 .into_iter()
1051 .map(|(source_kind, task)| {
1052 let resolved = task.resolved;
1053 (
1054 source_kind,
1055 task.resolved_label,
1056 resolved.args,
1057 resolved.env,
1058 )
1059 })
1060 .collect::<Vec<_>>();
1061 assert_eq!(
1062 all_tasks,
1063 vec![
1064 (
1065 TaskSourceKind::Worktree {
1066 id: worktree_id,
1067 directory_in_worktree: rel_path("b/.zed").into(),
1068 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1069 },
1070 "cargo check".to_string(),
1071 vec!["check".to_string()],
1072 HashMap::default(),
1073 ),
1074 (
1075 topmost_local_task_source_kind.clone(),
1076 "cargo check all".to_string(),
1077 vec!["check".to_string(), "--all".to_string()],
1078 HashMap::default(),
1079 ),
1080 ]
1081 );
1082
1083 let (_, resolved_task) = cx
1084 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1085 .await
1086 .into_iter()
1087 .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
1088 .expect("should have one global task");
1089 project.update(cx, |project, cx| {
1090 let task_inventory = project
1091 .task_store()
1092 .read(cx)
1093 .task_inventory()
1094 .cloned()
1095 .unwrap();
1096 task_inventory.update(cx, |inventory, _| {
1097 inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
1098 inventory
1099 .update_file_based_tasks(
1100 TaskSettingsLocation::Global(tasks_file()),
1101 Some(
1102 &json!([{
1103 "label": "cargo check unstable",
1104 "command": "cargo",
1105 "args": [
1106 "check",
1107 "--all",
1108 "--all-targets"
1109 ],
1110 "env": {
1111 "RUSTFLAGS": "-Zunstable-options"
1112 }
1113 }])
1114 .to_string(),
1115 ),
1116 )
1117 .unwrap();
1118 });
1119 });
1120 cx.run_until_parked();
1121
1122 let all_tasks = cx
1123 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1124 .await
1125 .into_iter()
1126 .map(|(source_kind, task)| {
1127 let resolved = task.resolved;
1128 (
1129 source_kind,
1130 task.resolved_label,
1131 resolved.args,
1132 resolved.env,
1133 )
1134 })
1135 .collect::<Vec<_>>();
1136 assert_eq!(
1137 all_tasks,
1138 vec![
1139 (
1140 topmost_local_task_source_kind.clone(),
1141 "cargo check all".to_string(),
1142 vec!["check".to_string(), "--all".to_string()],
1143 HashMap::default(),
1144 ),
1145 (
1146 TaskSourceKind::Worktree {
1147 id: worktree_id,
1148 directory_in_worktree: rel_path("b/.zed").into(),
1149 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1150 },
1151 "cargo check".to_string(),
1152 vec!["check".to_string()],
1153 HashMap::default(),
1154 ),
1155 (
1156 TaskSourceKind::AbsPath {
1157 abs_path: paths::tasks_file().clone(),
1158 id_base: "global tasks.json".into(),
1159 },
1160 "cargo check unstable".to_string(),
1161 vec![
1162 "check".to_string(),
1163 "--all".to_string(),
1164 "--all-targets".to_string(),
1165 ],
1166 HashMap::from_iter(Some((
1167 "RUSTFLAGS".to_string(),
1168 "-Zunstable-options".to_string()
1169 ))),
1170 ),
1171 ]
1172 );
1173}
1174
// Verifies that a `.zed/tasks.json` containing an unknown task variable
// produces an `Event::Toast` whose link points at the tasks documentation.
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we had a chance to set up the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Flipped to `true` by the subscription below once the expected toast arrives.
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                // The toast must identify the offending variable and link to
                // the documentation for local tasks.
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1232
// Verifies that a worktree task referencing `$ZED_WORKTREE_ROOT` only resolves
// once a worktree context that defines that variable is supplied.
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Single worktree with one `.zed/tasks.json` task that depends on
    // `$ZED_WORKTREE_ROOT` for its command.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // With only an active-item context (no worktree context), the
    // `$ZED_WORKTREE_ROOT` variable is undefined and the task cannot resolve.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Providing a worktree context that defines `WorktreeRoot` makes the same
    // task resolvable, with the variable substituted into the command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
1324
// Verifies that two subprojects in one worktree initially share a single
// language server instance, and that activating a different toolchain for one
// subproject spawns a second, separate server instance for it.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: a directory "roots" a project if any of its
    // ancestors (up to `depth`) directly contains a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two Python subprojects, each with its own manifest and virtual env dir.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery roots project-b at its own manifest directory.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain has been activated yet for project-b.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1526
// End-to-end lifecycle test for language-server management: server startup on
// buffer open, capability-driven buffer configuration, routing of change /
// save / close notifications to the matching server, re-routing when a rename
// changes a file's language, and document reopening after a server restart.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake servers with distinct completion triggers, so the test can tell
    // which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed the renamed buffer with one diagnostic so we can later observe it
    // being cleared when the buffer switches languages.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1929
1930#[gpui::test]
1931async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1932 init_test(cx);
1933
1934 let settings_json_contents = json!({
1935 "languages": {
1936 "Rust": {
1937 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1938 }
1939 },
1940 "lsp": {
1941 "my_fake_lsp": {
1942 "binary": {
1943 // file exists, so this is treated as a relative path
1944 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1945 }
1946 },
1947 "lsp_on_path": {
1948 "binary": {
1949 // file doesn't exist, so it will fall back on PATH env var
1950 "path": path!("lsp_on_path.exe").to_string(),
1951 }
1952 }
1953 },
1954 });
1955
1956 let fs = FakeFs::new(cx.executor());
1957 fs.insert_tree(
1958 path!("/the-root"),
1959 json!({
1960 ".zed": {
1961 "settings.json": settings_json_contents.to_string(),
1962 },
1963 ".relative_path": {
1964 "to": {
1965 "my_fake_lsp.exe": "",
1966 },
1967 },
1968 "src": {
1969 "main.rs": "",
1970 }
1971 }),
1972 )
1973 .await;
1974
1975 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1976 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1977 language_registry.add(rust_lang());
1978
1979 let mut my_fake_lsp = language_registry.register_fake_lsp(
1980 "Rust",
1981 FakeLspAdapter {
1982 name: "my_fake_lsp",
1983 ..Default::default()
1984 },
1985 );
1986 let mut lsp_on_path = language_registry.register_fake_lsp(
1987 "Rust",
1988 FakeLspAdapter {
1989 name: "lsp_on_path",
1990 ..Default::default()
1991 },
1992 );
1993
1994 cx.run_until_parked();
1995
1996 // Start the language server by opening a buffer with a compatible file extension.
1997 project
1998 .update(cx, |project, cx| {
1999 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
2000 })
2001 .await
2002 .unwrap();
2003
2004 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
2005 assert_eq!(
2006 lsp_path.to_string_lossy(),
2007 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
2008 );
2009
2010 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
2011 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
2012}
2013
2014#[gpui::test]
2015async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2016 init_test(cx);
2017
2018 let settings_json_contents = json!({
2019 "languages": {
2020 "Rust": {
2021 "language_servers": ["tilde_lsp"]
2022 }
2023 },
2024 "lsp": {
2025 "tilde_lsp": {
2026 "binary": {
2027 "path": "~/.local/bin/rust-analyzer",
2028 }
2029 }
2030 },
2031 });
2032
2033 let fs = FakeFs::new(cx.executor());
2034 fs.insert_tree(
2035 path!("/root"),
2036 json!({
2037 ".zed": {
2038 "settings.json": settings_json_contents.to_string(),
2039 },
2040 "src": {
2041 "main.rs": "fn main() {}",
2042 }
2043 }),
2044 )
2045 .await;
2046
2047 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2048 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2049 language_registry.add(rust_lang());
2050
2051 let mut tilde_lsp = language_registry.register_fake_lsp(
2052 "Rust",
2053 FakeLspAdapter {
2054 name: "tilde_lsp",
2055 ..Default::default()
2056 },
2057 );
2058 cx.run_until_parked();
2059
2060 project
2061 .update(cx, |project, cx| {
2062 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2063 })
2064 .await
2065 .unwrap();
2066
2067 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2068 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2069 assert_eq!(
2070 lsp_path, expected_path,
2071 "Tilde path should expand to home directory"
2072 );
2073}
2074
// Verifies that a filesystem `Rescan` event for a path a language server
// watches (via `workspace/didChangeWatchedFiles`) is forwarded to that server
// as a `CHANGED` file event.
#[gpui::test]
async fn test_rescan_fs_change_is_reported_to_language_servers_as_changed(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, _lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Opening a Rust buffer starts the fake server.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The server registers a watcher for `Cargo.lock`; every forwarded file
    // event is accumulated into `file_changes` by the notification handler.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(
            lsp::RegistrationParams {
                registrations: vec![lsp::Registration {
                    id: Default::default(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: serde_json::to_value(
                        lsp::DidChangeWatchedFilesRegistrationOptions {
                            watchers: vec![lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            }],
                        },
                    )
                    .ok(),
                }],
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
        }
    });

    // No events should have been delivered before any fs activity.
    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);

    // Emit a `Rescan` event for the watched path.
    fs.emit_fs_event(path!("/the-root/Cargo.lock"), Some(PathEventKind::Rescan));
    cx.executor().run_until_parked();

    // The rescan surfaces to the server as a single CHANGED event.
    assert_eq!(
        &*file_changes.lock(),
        &[lsp::FileEvent {
            uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
            typ: lsp::FileChangeType::CHANGED,
        }]
    );
}
2165
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP `workspace/didChangeWatchedFiles` registrations cause:
    // 1. ignored directories matching a watcher to be loaded into the worktree,
    // 2. out-of-worktree paths matching a watcher to be observed, and
    // 3. only FS mutations matching the watch globs to be forwarded to the server.
    init_test(cx);

    // Main worktree: `target` is gitignored, so its contents should not be
    // scanned until a language server explicitly asks to watch paths inside it.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;
    // Two trees outside the worktree: a package registry (reached via
    // go-to-definition) and a stdlib checkout (reached via a watch glob).
    fs.insert_tree(
        path!("/the-registry"),
        json!({
            "dep1": {
                "src": {
                    "dep1.rs": "",
                }
            },
            "dep2": {
                "src": {
                    "dep2.rs": "",
                }
            },
        }),
    )
    .await;
    fs.insert_tree(
        path!("/the/stdlib"),
        json!({
            "LICENSE": "",
            "src": {
                "string.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                ("", false),
                (".gitignore", false),
                ("Cargo.lock", false),
                ("src", false),
                ("src/a.rs", false),
                ("src/b.rs", false),
                ("target", true),
            ]
        );
    });

    // Snapshot the read_dir call count so we can later measure how many extra
    // directory scans the watch registration triggers.
    let prev_read_dir_count = fs.read_dir_call_count();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let server_id = lsp_store.read_with(cx, |lsp_store, _| {
        let (id, _) = lsp_store.language_server_statuses().next().unwrap();
        id
    });

    // Simulate jumping to a definition in a dependency outside of the worktree.
    let _out_of_worktree_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_via_lsp(
                lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
                server_id,
                cx,
            )
        })
        .await
        .unwrap();

    // Keep track of the FS events reported to the language server.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(
            lsp::RegistrationParams {
                registrations: vec![lsp::Registration {
                    id: Default::default(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: serde_json::to_value(
                        lsp::DidChangeWatchedFilesRegistrationOptions {
                            // A mix of watchers: a nonexistent file, a src glob,
                            // an ignored subtree, an out-of-worktree tree, and a
                            // relative glob that should match inside the worktree.
                            watchers: vec![
                                lsp::FileSystemWatcher {
                                    glob_pattern: lsp::GlobPattern::String(
                                        path!("/the-root/Cargo.toml").to_string(),
                                    ),
                                    kind: None,
                                },
                                lsp::FileSystemWatcher {
                                    glob_pattern: lsp::GlobPattern::String(
                                        path!("/the-root/src/*.{rs,c}").to_string(),
                                    ),
                                    kind: None,
                                },
                                lsp::FileSystemWatcher {
                                    glob_pattern: lsp::GlobPattern::String(
                                        path!("/the-root/target/y/**/*.rs").to_string(),
                                    ),
                                    kind: None,
                                },
                                lsp::FileSystemWatcher {
                                    glob_pattern: lsp::GlobPattern::String(
                                        path!("/the/stdlib/src/**/*.rs").to_string(),
                                    ),
                                    kind: None,
                                },
                                lsp::FileSystemWatcher {
                                    glob_pattern: lsp::GlobPattern::String(
                                        path!("**/Cargo.lock").to_string(),
                                    ),
                                    kind: None,
                                },
                            ],
                        },
                    )
                    .ok(),
                }],
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Sort so the assertions below are independent of delivery order.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering the watchers alone must not synthesize any change events.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    // NOTE(review): the 4 extra read_dir calls presumably come from scanning the
    // newly-watched ignored/out-of-worktree directories — confirm against FakeFs
    // accounting if this assertion starts flaking.
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Ignore watches that are unrelated to this test (settings dir, global gitignore).
    let mut new_watched_paths = fs.watched_paths();
    new_watched_paths.retain(|path| {
        !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
    });
    assert_eq!(
        &new_watched_paths,
        &[
            Path::new(path!("/the-root")),
            Path::new(path!("/the-registry/dep1/src/dep1.rs")),
            Path::new(path!("/the/stdlib/src"))
        ]
    );

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.visible_worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                ("", false),
                (".gitignore", false),
                ("Cargo.lock", false),
                ("src", false),
                ("src/a.rs", false),
                ("src/b.rs", false),
                ("target", true),
                // Only `target/y` (the watched subtree) is loaded recursively;
                // siblings `x` and `z` stay unscanned.
                ("target/x", true),
                ("target/y", true),
                ("target/y/out", true),
                ("target/y/out/y.rs", true),
                ("target/z", true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.save(
        path!("/the-root/Cargo.lock").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();
    // NOTE(review): this writes "/the-stdlib/LICENSE" (dash), not a file inside
    // the "/the/stdlib" tree inserted above — it appears intended as a mutation
    // that matches no watcher, but confirm the dash vs slash is deliberate.
    fs.save(
        path!("/the-stdlib/LICENSE").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.save(
        path!("/the/stdlib/src/string.rs").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
        ]
    );
}
2472
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that when a project contains two single-file worktrees,
    // diagnostics published per-URI are routed to the correct buffer.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open each file as its own single-file worktree.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Push one ERROR into a.rs and one WARNING into b.rs from the same server.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer should carry only its own diagnostic, at the variable name.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
2578
2579#[gpui::test]
2580async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2581 init_test(cx);
2582
2583 let fs = FakeFs::new(cx.executor());
2584 fs.insert_tree(
2585 path!("/root"),
2586 json!({
2587 "dir": {
2588 ".git": {
2589 "HEAD": "ref: refs/heads/main",
2590 },
2591 ".gitignore": "b.rs",
2592 "a.rs": "let a = 1;",
2593 "b.rs": "let b = 2;",
2594 },
2595 "other.rs": "let b = c;"
2596 }),
2597 )
2598 .await;
2599
2600 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2601 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2602 let (worktree, _) = project
2603 .update(cx, |project, cx| {
2604 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2605 })
2606 .await
2607 .unwrap();
2608 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2609
2610 let (worktree, _) = project
2611 .update(cx, |project, cx| {
2612 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2613 })
2614 .await
2615 .unwrap();
2616 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2617
2618 let server_id = LanguageServerId(0);
2619 lsp_store.update(cx, |lsp_store, cx| {
2620 lsp_store
2621 .update_diagnostics(
2622 server_id,
2623 lsp::PublishDiagnosticsParams {
2624 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2625 version: None,
2626 diagnostics: vec![lsp::Diagnostic {
2627 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2628 severity: Some(lsp::DiagnosticSeverity::ERROR),
2629 message: "unused variable 'b'".to_string(),
2630 ..Default::default()
2631 }],
2632 },
2633 None,
2634 DiagnosticSourceKind::Pushed,
2635 &[],
2636 cx,
2637 )
2638 .unwrap();
2639 lsp_store
2640 .update_diagnostics(
2641 server_id,
2642 lsp::PublishDiagnosticsParams {
2643 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2644 version: None,
2645 diagnostics: vec![lsp::Diagnostic {
2646 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2647 severity: Some(lsp::DiagnosticSeverity::ERROR),
2648 message: "unknown variable 'c'".to_string(),
2649 ..Default::default()
2650 }],
2651 },
2652 None,
2653 DiagnosticSourceKind::Pushed,
2654 &[],
2655 cx,
2656 )
2657 .unwrap();
2658 });
2659
2660 let main_ignored_buffer = project
2661 .update(cx, |project, cx| {
2662 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2663 })
2664 .await
2665 .unwrap();
2666 main_ignored_buffer.update(cx, |buffer, _| {
2667 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2668 assert_eq!(
2669 chunks
2670 .iter()
2671 .map(|(s, d)| (s.as_str(), *d))
2672 .collect::<Vec<_>>(),
2673 &[
2674 ("let ", None),
2675 ("b", Some(DiagnosticSeverity::ERROR)),
2676 (" = 2;", None),
2677 ],
2678 "Gigitnored buffers should still get in-buffer diagnostics",
2679 );
2680 });
2681 let other_buffer = project
2682 .update(cx, |project, cx| {
2683 project.open_buffer((other_worktree_id, rel_path("")), cx)
2684 })
2685 .await
2686 .unwrap();
2687 other_buffer.update(cx, |buffer, _| {
2688 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2689 assert_eq!(
2690 chunks
2691 .iter()
2692 .map(|(s, d)| (s.as_str(), *d))
2693 .collect::<Vec<_>>(),
2694 &[
2695 ("let b = ", None),
2696 ("c", Some(DiagnosticSeverity::ERROR)),
2697 (";", None),
2698 ],
2699 "Buffers from hidden projects should still get in-buffer diagnostics"
2700 );
2701 });
2702
2703 project.update(cx, |project, cx| {
2704 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2705 assert_eq!(
2706 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2707 vec![(
2708 ProjectPath {
2709 worktree_id: main_worktree_id,
2710 path: rel_path("b.rs").into(),
2711 },
2712 server_id,
2713 DiagnosticSummary {
2714 error_count: 1,
2715 warning_count: 0,
2716 }
2717 )]
2718 );
2719 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2720 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2721 });
2722}
2723
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies the project event sequence for disk-based diagnostics:
    // server added -> started -> diagnostics updated -> finished, and that a
    // redundant empty publish does not produce a second update event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Progress notifications with this token are treated as
            // disk-based-diagnostics activity by the project.
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress with the configured token emits DiskBasedDiagnosticsStarted.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the buffer afterwards should surface the pushed diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Second empty publish: no further event should be pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2859
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server while its disk-based
    // diagnostics are still in progress does not leave the project stuck in a
    // "diagnostics running" state: the new server's lifecycle fully supersedes
    // the old one's unfinished progress.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed and the replacement gets a new id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2961
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server clears the diagnostics it had
    // previously published, both from the buffer and from the project summary.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is visible in the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
3042
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    // Verifies that a diagnostics publish carrying an unknown (stale/invalid)
    // buffer version does not corrupt state: after a server restart, the buffer
    // is re-opened with the correct version (0).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The restarted server re-opens the buffer at version 0, not the bogus one.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
3082
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Verifies that cancelling language-server work for a buffer sends
    // `window/workDoneProgress/cancel` only for tokens whose progress was begun
    // with `cancellable: true`.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First token is explicitly non-cancellable; it must NOT receive a cancel.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second token is cancellable; it is the one we expect to be cancelled.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Only the cancellable token's cancel notification should arrive.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3154
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Verifies that flipping `enable_language_server` per language in the user
    // settings starts/stops exactly the matching server, leaving others alone.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each file starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3272
// Verifies that push diagnostics are translated through buffer edits:
// diagnostics published against an *older* LSP document version land at the
// correct, shifted positions in the current buffer; overlapping diagnostics
// are both surfaced; and diagnostics arriving in non-sorted order are
// handled correctly.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // "disk" is registered as a disk-based diagnostics source, which the
    // assertions below observe via `is_disk_based: true`.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The "\n\n" insertion shifted every row by 2.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Querying rows 3..5 yields only the 'BB' and 'CCC' diagnostics;
        // 'A' now sits on row 2 and is excluded by the range.
        // NOTE(review): group ids look like per-publish monotonically
        // assigned ids from the diagnostic store — confirm there.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        // Chunk iteration reports the severity covering each run of text.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range query clips chunks at the query boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    // These are again published against the original (pre-edit) version.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Both the warning (wider range) and the nested error are returned.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the more severe one wins the
        // chunk highlight; the warning covers the remainder of its range.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    // (the row-1 diagnostic is listed before the row-0 diagnostic).
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Diagnostics come back sorted by position, with ranges reflecting
        // the "    ", "(x: usize)", and "xxx" edits made above.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
3564
// Verifies how zero-width diagnostic ranges are rendered in chunk iteration:
// an empty range is widened so the user can actually see a highlight.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Push two empty-range diagnostics directly into the LSP store:
    // one mid-line (before the ';') and one at the very end of a line.
    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from(path!("/dir/a.rs")),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
3642
3643#[gpui::test]
3644async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3645 init_test(cx);
3646
3647 let fs = FakeFs::new(cx.executor());
3648 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3649 .await;
3650
3651 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3652 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3653
3654 lsp_store.update(cx, |lsp_store, cx| {
3655 lsp_store
3656 .update_diagnostic_entries(
3657 LanguageServerId(0),
3658 Path::new(path!("/dir/a.rs")).to_owned(),
3659 None,
3660 None,
3661 vec![DiagnosticEntry {
3662 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3663 diagnostic: Diagnostic {
3664 severity: DiagnosticSeverity::ERROR,
3665 is_primary: true,
3666 message: "syntax error a1".to_string(),
3667 source_kind: DiagnosticSourceKind::Pushed,
3668 ..Diagnostic::default()
3669 },
3670 }],
3671 cx,
3672 )
3673 .unwrap();
3674 lsp_store
3675 .update_diagnostic_entries(
3676 LanguageServerId(1),
3677 Path::new(path!("/dir/a.rs")).to_owned(),
3678 None,
3679 None,
3680 vec![DiagnosticEntry {
3681 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3682 diagnostic: Diagnostic {
3683 severity: DiagnosticSeverity::ERROR,
3684 is_primary: true,
3685 message: "syntax error b1".to_string(),
3686 source_kind: DiagnosticSourceKind::Pushed,
3687 ..Diagnostic::default()
3688 },
3689 }],
3690 cx,
3691 )
3692 .unwrap();
3693
3694 assert_eq!(
3695 lsp_store.diagnostic_summary(false, cx),
3696 DiagnosticSummary {
3697 error_count: 2,
3698 warning_count: 0,
3699 }
3700 );
3701 });
3702}
3703
// Verifies that `edits_from_lsp` can take edits expressed against a *past*
// LSP document version and translate them into the buffer's current
// coordinates, composing correctly with edits the user made in between.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    // Capture the document version the server saw at open time; the LSP
    // edits below are expressed against this (soon to be stale) version.
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Resolve the stale edits; positions refer to the *old* document version.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the user's interleaved
    // comments while still landing the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3858
// Verifies that a large "rewrite most of the file" set of LSP edits (as
// rust-analyzer produces for merge-imports) is minimized by `edits_from_lsp`
// into the small true diff.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The huge reinsert+delete pair collapses to just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3969
3970#[gpui::test]
3971async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
3972 cx: &mut gpui::TestAppContext,
3973) {
3974 init_test(cx);
3975
3976 let text = "Path()";
3977
3978 let fs = FakeFs::new(cx.executor());
3979 fs.insert_tree(
3980 path!("/dir"),
3981 json!({
3982 "a.rs": text
3983 }),
3984 )
3985 .await;
3986
3987 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3988 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3989 let buffer = project
3990 .update(cx, |project, cx| {
3991 project.open_local_buffer(path!("/dir/a.rs"), cx)
3992 })
3993 .await
3994 .unwrap();
3995
3996 // Simulate the language server sending us a pair of edits at the same location,
3997 // with an insertion following a replacement (which violates the LSP spec).
3998 let edits = lsp_store
3999 .update(cx, |lsp_store, cx| {
4000 lsp_store.as_local_mut().unwrap().edits_from_lsp(
4001 &buffer,
4002 [
4003 lsp::TextEdit {
4004 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
4005 new_text: "Path".into(),
4006 },
4007 lsp::TextEdit {
4008 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
4009 new_text: "from path import Path\n\n\n".into(),
4010 },
4011 ],
4012 LanguageServerId(0),
4013 None,
4014 cx,
4015 )
4016 })
4017 .await
4018 .unwrap();
4019
4020 buffer.update(cx, |buffer, cx| {
4021 buffer.edit(edits, None, cx);
4022 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
4023 });
4024}
4025
// Verifies that `edits_from_lsp` sanitizes malformed input: unordered edits,
// an inverted range (end before start), and a range pointing past the end of
// the file all get normalized into a minimal, valid edit list.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) > end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is beyond the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same minimized result as the well-formed adjacent-lines case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4132
4133fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4134 buffer: &Buffer,
4135 range: Range<T>,
4136) -> Vec<(String, Option<DiagnosticSeverity>)> {
4137 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4138 for chunk in buffer.snapshot().chunks(range, true) {
4139 if chunks
4140 .last()
4141 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4142 {
4143 chunks.last_mut().unwrap().0.push_str(chunk.text);
4144 } else {
4145 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4146 }
4147 }
4148 chunks
4149}
4150
// Verifies go-to-definition into a file outside the project: the target file
// is added as an *invisible* worktree that lives only as long as the
// definition result is held, and no additional language server is spawned
// for it.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server reports a definition located in a.rs.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, a.rs exists as an invisible
        // worktree alongside the visible b.rs worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4251
// Verifies that when a completion item provides `text_edit`, that edit's
// text and range are used for the completion, taking precedence over both
// `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request before installing the handler; the
    // handler's `.next().await` below waits for that request to arrive.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        // Replaces the trailing "fqn" (last 3 characters).
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The text_edit's new_text and range win.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4335
// Verifies completion-list defaults (`itemDefaults.editRange`): when an item
// lacks its own `text_edit`, the list-level default edit range is used, and
// the inserted text falls back to `text_edit_text`, then to `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Request first; the handler's `.next().await` waits for it to arrive.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covers the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // text_edit_text is inserted over the default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit range present, the label is inserted —
        // `insert_text` is ignored in this case.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4473
// Exercises the fallback paths for LSP completion items that carry no
// `text_edit` and whose completion list provides no default `edit_range`:
// the new text must come from `insert_text` when present, otherwise from the
// item's label, and the replace range must be derived from the text around
// the cursor position (as the assertions below pin down).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the completion request with the cursor at the end of the buffer;
    // the handler installed below (awaited via `.next()`) services it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // `insert_text` wins over the label...
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // ...and the replace range spans the 3-character fragment "fqn" that
    // precedes the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote of the string literal.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // With no insert_text either, the label itself becomes the new text...
    assert_eq!(completions[0].new_text, "component");
    // ...replacing the 3-character fragment "cmp" before the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4580
// Completion insert texts may arrive from the server with bare `\r` or
// `\r\n` line endings; the final assertion pins down that they are
// normalized to `\n` by the time the completion reaches the caller.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Request completions first; the handler below serves the request once
    // it is installed and awaited.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Deliberately mixes a lone `\r` and a `\r\n`.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both carriage-return forms were normalized to plain `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4649
// Applies a code action whose resolution carries a command instead of edits.
// Executing that command makes the (fake) language server send an
// `ApplyWorkspaceEdit` request back to the client; the edits delivered that
// way must show up in the `ProjectTransaction` returned by
// `apply_code_action`, and must be undoable in the buffer.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // The server advertises lazy code-action resolution and a
                // single executable command.
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action ("The code action"), which carries `data` and
    // therefore must be resolved before it can be applied.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Insert "X" at the start of a.ts via workspace/applyEdit.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        // "X" was inserted at the start of "a"...
        assert_eq!(buffer.text(), "Xa");
        // ...and the transaction's edit can be undone.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
4793
// Renaming a file to a path whose parent directories do not exist yet must
// create the whole hierarchy, drop the old entry, and keep the file's
// contents intact. A second rename into an already-existing directory must
// likewise succeed without resurrecting any old entries.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // First rename: move into a brand-new dir1/dir2/dir3 hierarchy.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Second rename: move the file up into the already-existing dir1/dir2.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
4901
4902#[gpui::test(iterations = 10)]
4903async fn test_save_file(cx: &mut gpui::TestAppContext) {
4904 init_test(cx);
4905
4906 let fs = FakeFs::new(cx.executor());
4907 fs.insert_tree(
4908 path!("/dir"),
4909 json!({
4910 "file1": "the old contents",
4911 }),
4912 )
4913 .await;
4914
4915 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4916 let buffer = project
4917 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4918 .await
4919 .unwrap();
4920 buffer.update(cx, |buffer, cx| {
4921 assert_eq!(buffer.text(), "the old contents");
4922 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4923 });
4924
4925 project
4926 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4927 .await
4928 .unwrap();
4929
4930 let new_text = fs
4931 .load(Path::new(path!("/dir/file1")))
4932 .await
4933 .unwrap()
4934 .replace("\r\n", "\n");
4935 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4936}
4937
// Regression test for #24349: an untitled buffer starts out with no language
// servers, but saving it under a name that maps to a language (`file.rs`)
// must start that language's server and open the document in it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register a fake Rust server; it should only start once a Rust file
    // exists in the project.
    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; with no file, no language server applies.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving under a `.rs` name gives the buffer a Rust file association.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // Now the buffer is backed by a Rust file and has a language server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
5017
5018#[gpui::test(iterations = 30)]
5019async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
5020 init_test(cx);
5021
5022 let fs = FakeFs::new(cx.executor());
5023 fs.insert_tree(
5024 path!("/dir"),
5025 json!({
5026 "file1": "the original contents",
5027 }),
5028 )
5029 .await;
5030
5031 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5032 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5033 let buffer = project
5034 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5035 .await
5036 .unwrap();
5037
5038 // Change the buffer's file on disk, and then wait for the file change
5039 // to be detected by the worktree, so that the buffer starts reloading.
5040 fs.save(
5041 path!("/dir/file1").as_ref(),
5042 &"the first contents".into(),
5043 Default::default(),
5044 )
5045 .await
5046 .unwrap();
5047 worktree.next_event(cx).await;
5048
5049 // Change the buffer's file again. Depending on the random seed, the
5050 // previous file change may still be in progress.
5051 fs.save(
5052 path!("/dir/file1").as_ref(),
5053 &"the second contents".into(),
5054 Default::default(),
5055 )
5056 .await
5057 .unwrap();
5058 worktree.next_event(cx).await;
5059
5060 cx.executor().run_until_parked();
5061 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5062 buffer.read_with(cx, |buffer, _| {
5063 assert_eq!(buffer.text(), on_disk_text);
5064 assert!(!buffer.is_dirty(), "buffer should not be dirty");
5065 assert!(!buffer.has_conflict(), "buffer should not be dirty");
5066 });
5067}
5068
5069#[gpui::test(iterations = 30)]
5070async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
5071 init_test(cx);
5072
5073 let fs = FakeFs::new(cx.executor());
5074 fs.insert_tree(
5075 path!("/dir"),
5076 json!({
5077 "file1": "the original contents",
5078 }),
5079 )
5080 .await;
5081
5082 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5083 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5084 let buffer = project
5085 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5086 .await
5087 .unwrap();
5088
5089 // Change the buffer's file on disk, and then wait for the file change
5090 // to be detected by the worktree, so that the buffer starts reloading.
5091 fs.save(
5092 path!("/dir/file1").as_ref(),
5093 &"the first contents".into(),
5094 Default::default(),
5095 )
5096 .await
5097 .unwrap();
5098 worktree.next_event(cx).await;
5099
5100 cx.executor()
5101 .spawn(cx.executor().simulate_random_delay())
5102 .await;
5103
5104 // Perform a noop edit, causing the buffer's version to increase.
5105 buffer.update(cx, |buffer, cx| {
5106 buffer.edit([(0..0, " ")], None, cx);
5107 buffer.undo(cx);
5108 });
5109
5110 cx.executor().run_until_parked();
5111 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5112 buffer.read_with(cx, |buffer, _| {
5113 let buffer_text = buffer.text();
5114 if buffer_text == on_disk_text {
5115 assert!(
5116 !buffer.is_dirty() && !buffer.has_conflict(),
5117 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5118 );
5119 }
5120 // If the file change occurred while the buffer was processing the first
5121 // change, the buffer will be in a conflicting state.
5122 else {
5123 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5124 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5125 }
5126 });
5127}
5128
5129#[gpui::test]
5130async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5131 init_test(cx);
5132
5133 let fs = FakeFs::new(cx.executor());
5134 fs.insert_tree(
5135 path!("/dir"),
5136 json!({
5137 "file1": "the old contents",
5138 }),
5139 )
5140 .await;
5141
5142 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5143 let buffer = project
5144 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5145 .await
5146 .unwrap();
5147 buffer.update(cx, |buffer, cx| {
5148 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5149 });
5150
5151 project
5152 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5153 .await
5154 .unwrap();
5155
5156 let new_text = fs
5157 .load(Path::new(path!("/dir/file1")))
5158 .await
5159 .unwrap()
5160 .replace("\r\n", "\n");
5161 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5162}
5163
// Saving an untitled buffer via `save_buffer_as` must write its contents to
// disk, attach the buffer to the new path, clear the dirty flag, and re-run
// language detection ("Plain Text" -> "Rust" for `file1.rs`). Opening the
// same path afterwards must return the very same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // With no file yet, the buffer falls back to plain text.
        assert_eq!(buffer.language().unwrap().name(), "Plain Text");
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // Language was re-detected from the new file name.
        assert_eq!(buffer.language().unwrap().name(), "Rust");
    });

    // Re-opening the path yields the already-open buffer, not a new one.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
5217
5218#[gpui::test]
5219async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
5220 init_test(cx);
5221
5222 let fs = FakeFs::new(cx.executor());
5223 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5224
5225 fs.insert_tree(
5226 path!("/dir"),
5227 json!({
5228 "data_a.txt": "data about a"
5229 }),
5230 )
5231 .await;
5232
5233 let buffer = project
5234 .update(cx, |project, cx| {
5235 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5236 })
5237 .await
5238 .unwrap();
5239
5240 buffer.update(cx, |buffer, cx| {
5241 buffer.edit([(11..12, "b")], None, cx);
5242 });
5243
5244 // Save buffer's contents as a new file and confirm that the buffer's now
5245 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
5246 // file associated with the buffer has now been updated to `data_b.txt`
5247 project
5248 .update(cx, |project, cx| {
5249 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5250 let new_path = ProjectPath {
5251 worktree_id,
5252 path: rel_path("data_b.txt").into(),
5253 };
5254
5255 project.save_buffer_as(buffer.clone(), new_path, cx)
5256 })
5257 .await
5258 .unwrap();
5259
5260 buffer.update(cx, |buffer, cx| {
5261 assert_eq!(
5262 buffer.file().unwrap().full_path(cx),
5263 Path::new("dir/data_b.txt")
5264 )
5265 });
5266
5267 // Open the original `data_a.txt` file, confirming that its contents are
5268 // unchanged and the resulting buffer's associated file is `data_a.txt`.
5269 let original_buffer = project
5270 .update(cx, |project, cx| {
5271 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5272 })
5273 .await
5274 .unwrap();
5275
5276 original_buffer.update(cx, |buffer, cx| {
5277 assert_eq!(buffer.text(), "data about a");
5278 assert_eq!(
5279 buffer.file().unwrap().full_path(cx),
5280 Path::new("dir/data_a.txt")
5281 )
5282 });
5283}
5284
// Mutates a real (non-fake) worktree on disk — file renames, deletions, and
// a directory move — and checks that entry ids and open buffers follow their
// files, and that a remote replica fed the observed update stream converges
// to the same set of paths as the local worktree.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real-FS events require parking the executor.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, to replay on the remote.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including the directory move of b/c -> d.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers track their files' new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        // ...while the deleted file's buffer keeps its last known path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5452
5453#[cfg(target_os = "linux")]
5454#[gpui::test(retries = 5)]
5455async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
5456 init_test(cx);
5457 cx.executor().allow_parking();
5458
5459 let dir = TempTree::new(json!({}));
5460 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
5461 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5462
5463 tree.flush_fs_events(cx).await;
5464
5465 let repro_dir = dir.path().join("repro");
5466 std::fs::create_dir(&repro_dir).unwrap();
5467 tree.flush_fs_events(cx).await;
5468
5469 cx.update(|cx| {
5470 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5471 });
5472
5473 std::fs::remove_dir_all(&repro_dir).unwrap();
5474 tree.flush_fs_events(cx).await;
5475
5476 cx.update(|cx| {
5477 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
5478 });
5479
5480 std::fs::create_dir(&repro_dir).unwrap();
5481 tree.flush_fs_events(cx).await;
5482
5483 cx.update(|cx| {
5484 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5485 });
5486
5487 std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
5488 tree.flush_fs_events(cx).await;
5489
5490 cx.update(|cx| {
5491 assert!(
5492 tree.read(cx)
5493 .entry_for_path(rel_path("repro/repro-marker"))
5494 .is_some()
5495 );
5496 });
5497}
5498
// Renaming a directory must keep the entry ids of both the directory and the
// file inside it stable, and a buffer opened from that file must remain
// clean across the rename.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the parent directory `a` to `b`.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Both the directory and the child file keep their original entry ids,
    // and the open buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
5552
5553#[gpui::test]
5554async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5555 init_test(cx);
5556
5557 let fs = FakeFs::new(cx.executor());
5558 fs.insert_tree(
5559 "/dir",
5560 json!({
5561 "a.txt": "a-contents",
5562 "b.txt": "b-contents",
5563 }),
5564 )
5565 .await;
5566
5567 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5568
5569 // Spawn multiple tasks to open paths, repeating some paths.
5570 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5571 (
5572 p.open_local_buffer("/dir/a.txt", cx),
5573 p.open_local_buffer("/dir/b.txt", cx),
5574 p.open_local_buffer("/dir/a.txt", cx),
5575 )
5576 });
5577
5578 let buffer_a_1 = buffer_a_1.await.unwrap();
5579 let buffer_a_2 = buffer_a_2.await.unwrap();
5580 let buffer_b = buffer_b.await.unwrap();
5581 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5582 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5583
5584 // There is only one buffer per path.
5585 let buffer_a_id = buffer_a_1.entity_id();
5586 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5587
5588 // Open the same path again while it is still open.
5589 drop(buffer_a_1);
5590 let buffer_a_3 = project
5591 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5592 .await
5593 .unwrap();
5594
5595 // There's still only one buffer per path.
5596 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5597}
5598
5599#[gpui::test]
5600async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5601 init_test(cx);
5602
5603 let fs = FakeFs::new(cx.executor());
5604 fs.insert_tree(
5605 path!("/dir"),
5606 json!({
5607 "file1": "abc",
5608 "file2": "def",
5609 "file3": "ghi",
5610 }),
5611 )
5612 .await;
5613
5614 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5615
5616 let buffer1 = project
5617 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5618 .await
5619 .unwrap();
5620 let events = Arc::new(Mutex::new(Vec::new()));
5621
5622 // initially, the buffer isn't dirty.
5623 buffer1.update(cx, |buffer, cx| {
5624 cx.subscribe(&buffer1, {
5625 let events = events.clone();
5626 move |_, _, event, _| match event {
5627 BufferEvent::Operation { .. } => {}
5628 _ => events.lock().push(event.clone()),
5629 }
5630 })
5631 .detach();
5632
5633 assert!(!buffer.is_dirty());
5634 assert!(events.lock().is_empty());
5635
5636 buffer.edit([(1..2, "")], None, cx);
5637 });
5638
5639 // after the first edit, the buffer is dirty, and emits a dirtied event.
5640 buffer1.update(cx, |buffer, cx| {
5641 assert!(buffer.text() == "ac");
5642 assert!(buffer.is_dirty());
5643 assert_eq!(
5644 *events.lock(),
5645 &[
5646 language::BufferEvent::Edited { is_local: true },
5647 language::BufferEvent::DirtyChanged
5648 ]
5649 );
5650 events.lock().clear();
5651 buffer.did_save(
5652 buffer.version(),
5653 buffer.file().unwrap().disk_state().mtime(),
5654 cx,
5655 );
5656 });
5657
5658 // after saving, the buffer is not dirty, and emits a saved event.
5659 buffer1.update(cx, |buffer, cx| {
5660 assert!(!buffer.is_dirty());
5661 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
5662 events.lock().clear();
5663
5664 buffer.edit([(1..1, "B")], None, cx);
5665 buffer.edit([(2..2, "D")], None, cx);
5666 });
5667
5668 // after editing again, the buffer is dirty, and emits another dirty event.
5669 buffer1.update(cx, |buffer, cx| {
5670 assert!(buffer.text() == "aBDc");
5671 assert!(buffer.is_dirty());
5672 assert_eq!(
5673 *events.lock(),
5674 &[
5675 language::BufferEvent::Edited { is_local: true },
5676 language::BufferEvent::DirtyChanged,
5677 language::BufferEvent::Edited { is_local: true },
5678 ],
5679 );
5680 events.lock().clear();
5681
5682 // After restoring the buffer to its previously-saved state,
5683 // the buffer is not considered dirty anymore.
5684 buffer.edit([(1..3, "")], None, cx);
5685 assert!(buffer.text() == "ac");
5686 assert!(!buffer.is_dirty());
5687 });
5688
5689 assert_eq!(
5690 *events.lock(),
5691 &[
5692 language::BufferEvent::Edited { is_local: true },
5693 language::BufferEvent::DirtyChanged
5694 ]
5695 );
5696
5697 // When a file is deleted, it is not considered dirty.
5698 let events = Arc::new(Mutex::new(Vec::new()));
5699 let buffer2 = project
5700 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5701 .await
5702 .unwrap();
5703 buffer2.update(cx, |_, cx| {
5704 cx.subscribe(&buffer2, {
5705 let events = events.clone();
5706 move |_, _, event, _| match event {
5707 BufferEvent::Operation { .. } => {}
5708 _ => events.lock().push(event.clone()),
5709 }
5710 })
5711 .detach();
5712 });
5713
5714 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
5715 .await
5716 .unwrap();
5717 cx.executor().run_until_parked();
5718 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5719 assert_eq!(
5720 mem::take(&mut *events.lock()),
5721 &[language::BufferEvent::FileHandleChanged]
5722 );
5723
5724 // Buffer becomes dirty when edited.
5725 buffer2.update(cx, |buffer, cx| {
5726 buffer.edit([(2..3, "")], None, cx);
5727 assert_eq!(buffer.is_dirty(), true);
5728 });
5729 assert_eq!(
5730 mem::take(&mut *events.lock()),
5731 &[
5732 language::BufferEvent::Edited { is_local: true },
5733 language::BufferEvent::DirtyChanged
5734 ]
5735 );
5736
5737 // Buffer becomes clean again when all of its content is removed, because
5738 // the file was deleted.
5739 buffer2.update(cx, |buffer, cx| {
5740 buffer.edit([(0..2, "")], None, cx);
5741 assert_eq!(buffer.is_empty(), true);
5742 assert_eq!(buffer.is_dirty(), false);
5743 });
5744 assert_eq!(
5745 *events.lock(),
5746 &[
5747 language::BufferEvent::Edited { is_local: true },
5748 language::BufferEvent::DirtyChanged
5749 ]
5750 );
5751
5752 // When a file is already dirty when deleted, we don't emit a Dirtied event.
5753 let events = Arc::new(Mutex::new(Vec::new()));
5754 let buffer3 = project
5755 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
5756 .await
5757 .unwrap();
5758 buffer3.update(cx, |_, cx| {
5759 cx.subscribe(&buffer3, {
5760 let events = events.clone();
5761 move |_, _, event, _| match event {
5762 BufferEvent::Operation { .. } => {}
5763 _ => events.lock().push(event.clone()),
5764 }
5765 })
5766 .detach();
5767 });
5768
5769 buffer3.update(cx, |buffer, cx| {
5770 buffer.edit([(0..0, "x")], None, cx);
5771 });
5772 events.lock().clear();
5773 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
5774 .await
5775 .unwrap();
5776 cx.executor().run_until_parked();
5777 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
5778 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
5779}
5780
5781#[gpui::test]
5782async fn test_dirty_buffer_reloads_after_undo(cx: &mut gpui::TestAppContext) {
5783 init_test(cx);
5784
5785 let fs = FakeFs::new(cx.executor());
5786 fs.insert_tree(
5787 path!("/dir"),
5788 json!({
5789 "file.txt": "version 1",
5790 }),
5791 )
5792 .await;
5793
5794 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5795 let buffer = project
5796 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx))
5797 .await
5798 .unwrap();
5799
5800 buffer.read_with(cx, |buffer, _| {
5801 assert_eq!(buffer.text(), "version 1");
5802 assert!(!buffer.is_dirty());
5803 });
5804
5805 // User makes an edit, making the buffer dirty.
5806 buffer.update(cx, |buffer, cx| {
5807 buffer.edit([(0..0, "user edit: ")], None, cx);
5808 });
5809
5810 buffer.read_with(cx, |buffer, _| {
5811 assert!(buffer.is_dirty());
5812 assert_eq!(buffer.text(), "user edit: version 1");
5813 });
5814
5815 // External tool writes new content while buffer is dirty.
5816 // file_updated() updates the File but suppresses ReloadNeeded.
5817 fs.save(
5818 path!("/dir/file.txt").as_ref(),
5819 &"version 2 from external tool".into(),
5820 Default::default(),
5821 )
5822 .await
5823 .unwrap();
5824 cx.executor().run_until_parked();
5825
5826 buffer.read_with(cx, |buffer, _| {
5827 assert!(buffer.has_conflict());
5828 assert_eq!(buffer.text(), "user edit: version 1");
5829 });
5830
5831 // User undoes their edit. Buffer becomes clean, but disk has different
5832 // content. did_edit() detects the dirty->clean transition and checks if
5833 // disk changed while dirty. Since mtime differs from saved_mtime, it
5834 // emits ReloadNeeded.
5835 buffer.update(cx, |buffer, cx| {
5836 buffer.undo(cx);
5837 });
5838 cx.executor().run_until_parked();
5839
5840 buffer.read_with(cx, |buffer, _| {
5841 assert_eq!(
5842 buffer.text(),
5843 "version 2 from external tool",
5844 "buffer should reload from disk after undo makes it clean"
5845 );
5846 assert!(!buffer.is_dirty());
5847 });
5848}
5849
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how a buffer reacts to on-disk changes: a clean buffer is
    // reloaded via a diff (so anchors track their logical positions), while
    // a dirty buffer keeps its text and is flagged as conflicted.
    init_test(cx);

    // The ˇ markers record offsets at which anchors will be created.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Anchors survive the reload and land at the corresponding marked
        // offsets in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
5932
5933#[gpui::test]
5934async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
5935 init_test(cx);
5936
5937 let fs = FakeFs::new(cx.executor());
5938 fs.insert_tree(
5939 path!("/dir"),
5940 json!({
5941 "file1": "a\nb\nc\n",
5942 "file2": "one\r\ntwo\r\nthree\r\n",
5943 }),
5944 )
5945 .await;
5946
5947 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5948 let buffer1 = project
5949 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5950 .await
5951 .unwrap();
5952 let buffer2 = project
5953 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5954 .await
5955 .unwrap();
5956
5957 buffer1.update(cx, |buffer, _| {
5958 assert_eq!(buffer.text(), "a\nb\nc\n");
5959 assert_eq!(buffer.line_ending(), LineEnding::Unix);
5960 });
5961 buffer2.update(cx, |buffer, _| {
5962 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
5963 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5964 });
5965
5966 // Change a file's line endings on disk from unix to windows. The buffer's
5967 // state updates correctly.
5968 fs.save(
5969 path!("/dir/file1").as_ref(),
5970 &"aaa\nb\nc\n".into(),
5971 LineEnding::Windows,
5972 )
5973 .await
5974 .unwrap();
5975 cx.executor().run_until_parked();
5976 buffer1.update(cx, |buffer, _| {
5977 assert_eq!(buffer.text(), "aaa\nb\nc\n");
5978 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5979 });
5980
5981 // Save a file with windows line endings. The file is written correctly.
5982 buffer2.update(cx, |buffer, cx| {
5983 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
5984 });
5985 project
5986 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
5987 .await
5988 .unwrap();
5989 assert_eq!(
5990 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
5991 "one\r\ntwo\r\nthree\r\nfour\r\n",
5992 );
5993}
5994
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics linked via `related_information`
    // are grouped: each primary diagnostic and its hint diagnostics share a
    // `group_id`, with `is_primary` marking the primary entry.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two diagnostic groups, mimicking rust-analyzer's style where hints are
    // published as separate diagnostics cross-linked with their primary via
    // `related_information`:
    //   - "error 1" (warning) with one hint
    //   - "error 2" (error) with two hints
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Push the diagnostics into the store as if a server published them.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in buffer order: "error 2" and its hints got group 0,
    // "error 1" and its hint got group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 alone: "error 2" plus both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 alone: "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6254
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies file-operation notifications around a worktree entry rename:
    // the server's `workspace/willRenameFiles` request is answered and its
    // returned edit applied, and `workspace/didRenameFiles` is sent after.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Register interest in *.rs files and all folders so rename operations
    // on them are reported to the fake server.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename one.rs -> three.rs; it won't complete until the
    // willRename request below is answered.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will return from willRenameFiles; the client is
    // expected to apply it before performing the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    // Answer willRenameFiles, checking the reported old/new URIs and
    // recording that the edit was handed out.
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles
    // with the same URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6391
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises LSP symbol renaming: prepare_rename resolves the renameable
    // range, and perform_rename applies the server's multi-file
    // WorkspaceEdit as a transaction across buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Prepare rename at offset 7 (inside "ONE"); the server answers with the
    // renameable range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename ONE -> THREE; the server returns edits touching
    // both one.rs (the definition) and two.rs (two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction covers both affected buffers, with all edits applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6532
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Verifies project-wide text search, and that results reflect unsaved
    // edits in open buffers rather than only the on-disk content.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive whole-word search for "TWO" matches its definition and
    // one reference.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit an open buffer (without saving) so that four.rs now contains
    // "TWO" twice in memory.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same search now also reports the matches from the dirty buffer.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
6609
6610#[gpui::test]
6611async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
6612 init_test(cx);
6613
6614 let search_query = "file";
6615
6616 let fs = FakeFs::new(cx.executor());
6617 fs.insert_tree(
6618 path!("/dir"),
6619 json!({
6620 "one.rs": r#"// Rust file one"#,
6621 "one.ts": r#"// TypeScript file one"#,
6622 "two.rs": r#"// Rust file two"#,
6623 "two.ts": r#"// TypeScript file two"#,
6624 }),
6625 )
6626 .await;
6627 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6628
6629 assert!(
6630 search(
6631 &project,
6632 SearchQuery::text(
6633 search_query,
6634 false,
6635 true,
6636 false,
6637 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6638 Default::default(),
6639 false,
6640 None
6641 )
6642 .unwrap(),
6643 cx
6644 )
6645 .await
6646 .unwrap()
6647 .is_empty(),
6648 "If no inclusions match, no files should be returned"
6649 );
6650
6651 assert_eq!(
6652 search(
6653 &project,
6654 SearchQuery::text(
6655 search_query,
6656 false,
6657 true,
6658 false,
6659 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6660 Default::default(),
6661 false,
6662 None
6663 )
6664 .unwrap(),
6665 cx
6666 )
6667 .await
6668 .unwrap(),
6669 HashMap::from_iter([
6670 (path!("dir/one.rs").to_string(), vec![8..12]),
6671 (path!("dir/two.rs").to_string(), vec![8..12]),
6672 ]),
6673 "Rust only search should give only Rust files"
6674 );
6675
6676 assert_eq!(
6677 search(
6678 &project,
6679 SearchQuery::text(
6680 search_query,
6681 false,
6682 true,
6683 false,
6684 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6685 .unwrap(),
6686 Default::default(),
6687 false,
6688 None,
6689 )
6690 .unwrap(),
6691 cx
6692 )
6693 .await
6694 .unwrap(),
6695 HashMap::from_iter([
6696 (path!("dir/one.ts").to_string(), vec![14..18]),
6697 (path!("dir/two.ts").to_string(), vec![14..18]),
6698 ]),
6699 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
6700 );
6701
6702 assert_eq!(
6703 search(
6704 &project,
6705 SearchQuery::text(
6706 search_query,
6707 false,
6708 true,
6709 false,
6710 PathMatcher::new(
6711 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6712 PathStyle::local()
6713 )
6714 .unwrap(),
6715 Default::default(),
6716 false,
6717 None,
6718 )
6719 .unwrap(),
6720 cx
6721 )
6722 .await
6723 .unwrap(),
6724 HashMap::from_iter([
6725 (path!("dir/two.ts").to_string(), vec![14..18]),
6726 (path!("dir/one.rs").to_string(), vec![8..12]),
6727 (path!("dir/one.ts").to_string(), vec![14..18]),
6728 (path!("dir/two.rs").to_string(), vec![8..12]),
6729 ]),
6730 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
6731 );
6732}
6733
6734#[gpui::test]
6735async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
6736 init_test(cx);
6737
6738 let search_query = "file";
6739
6740 let fs = FakeFs::new(cx.executor());
6741 fs.insert_tree(
6742 path!("/dir"),
6743 json!({
6744 "one.rs": r#"// Rust file one"#,
6745 "one.ts": r#"// TypeScript file one"#,
6746 "two.rs": r#"// Rust file two"#,
6747 "two.ts": r#"// TypeScript file two"#,
6748 }),
6749 )
6750 .await;
6751 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6752
6753 assert_eq!(
6754 search(
6755 &project,
6756 SearchQuery::text(
6757 search_query,
6758 false,
6759 true,
6760 false,
6761 Default::default(),
6762 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6763 false,
6764 None,
6765 )
6766 .unwrap(),
6767 cx
6768 )
6769 .await
6770 .unwrap(),
6771 HashMap::from_iter([
6772 (path!("dir/one.rs").to_string(), vec![8..12]),
6773 (path!("dir/one.ts").to_string(), vec![14..18]),
6774 (path!("dir/two.rs").to_string(), vec![8..12]),
6775 (path!("dir/two.ts").to_string(), vec![14..18]),
6776 ]),
6777 "If no exclusions match, all files should be returned"
6778 );
6779
6780 assert_eq!(
6781 search(
6782 &project,
6783 SearchQuery::text(
6784 search_query,
6785 false,
6786 true,
6787 false,
6788 Default::default(),
6789 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6790 false,
6791 None,
6792 )
6793 .unwrap(),
6794 cx
6795 )
6796 .await
6797 .unwrap(),
6798 HashMap::from_iter([
6799 (path!("dir/one.ts").to_string(), vec![14..18]),
6800 (path!("dir/two.ts").to_string(), vec![14..18]),
6801 ]),
6802 "Rust exclusion search should give only TypeScript files"
6803 );
6804
6805 assert_eq!(
6806 search(
6807 &project,
6808 SearchQuery::text(
6809 search_query,
6810 false,
6811 true,
6812 false,
6813 Default::default(),
6814 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6815 .unwrap(),
6816 false,
6817 None,
6818 )
6819 .unwrap(),
6820 cx
6821 )
6822 .await
6823 .unwrap(),
6824 HashMap::from_iter([
6825 (path!("dir/one.rs").to_string(), vec![8..12]),
6826 (path!("dir/two.rs").to_string(), vec![8..12]),
6827 ]),
6828 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6829 );
6830
6831 assert!(
6832 search(
6833 &project,
6834 SearchQuery::text(
6835 search_query,
6836 false,
6837 true,
6838 false,
6839 Default::default(),
6840 PathMatcher::new(
6841 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6842 PathStyle::local(),
6843 )
6844 .unwrap(),
6845 false,
6846 None,
6847 )
6848 .unwrap(),
6849 cx
6850 )
6851 .await
6852 .unwrap()
6853 .is_empty(),
6854 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6855 );
6856}
6857
6858#[gpui::test]
6859async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
6860 init_test(cx);
6861
6862 let search_query = "file";
6863
6864 let fs = FakeFs::new(cx.executor());
6865 fs.insert_tree(
6866 path!("/dir"),
6867 json!({
6868 "one.rs": r#"// Rust file one"#,
6869 "one.ts": r#"// TypeScript file one"#,
6870 "two.rs": r#"// Rust file two"#,
6871 "two.ts": r#"// TypeScript file two"#,
6872 }),
6873 )
6874 .await;
6875
6876 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6877 let path_style = PathStyle::local();
6878 let _buffer = project.update(cx, |project, cx| {
6879 project.create_local_buffer("file", None, false, cx)
6880 });
6881
6882 assert_eq!(
6883 search(
6884 &project,
6885 SearchQuery::text(
6886 search_query,
6887 false,
6888 true,
6889 false,
6890 Default::default(),
6891 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
6892 false,
6893 None,
6894 )
6895 .unwrap(),
6896 cx
6897 )
6898 .await
6899 .unwrap(),
6900 HashMap::from_iter([
6901 (path!("dir/one.rs").to_string(), vec![8..12]),
6902 (path!("dir/one.ts").to_string(), vec![14..18]),
6903 (path!("dir/two.rs").to_string(), vec![8..12]),
6904 (path!("dir/two.ts").to_string(), vec![14..18]),
6905 ]),
6906 "If no exclusions match, all files should be returned"
6907 );
6908
6909 assert_eq!(
6910 search(
6911 &project,
6912 SearchQuery::text(
6913 search_query,
6914 false,
6915 true,
6916 false,
6917 Default::default(),
6918 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
6919 false,
6920 None,
6921 )
6922 .unwrap(),
6923 cx
6924 )
6925 .await
6926 .unwrap(),
6927 HashMap::from_iter([
6928 (path!("dir/one.ts").to_string(), vec![14..18]),
6929 (path!("dir/two.ts").to_string(), vec![14..18]),
6930 ]),
6931 "Rust exclusion search should give only TypeScript files"
6932 );
6933
6934 assert_eq!(
6935 search(
6936 &project,
6937 SearchQuery::text(
6938 search_query,
6939 false,
6940 true,
6941 false,
6942 Default::default(),
6943 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6944 false,
6945 None,
6946 )
6947 .unwrap(),
6948 cx
6949 )
6950 .await
6951 .unwrap(),
6952 HashMap::from_iter([
6953 (path!("dir/one.rs").to_string(), vec![8..12]),
6954 (path!("dir/two.rs").to_string(), vec![8..12]),
6955 ]),
6956 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6957 );
6958
6959 assert!(
6960 search(
6961 &project,
6962 SearchQuery::text(
6963 search_query,
6964 false,
6965 true,
6966 false,
6967 Default::default(),
6968 PathMatcher::new(
6969 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6970 PathStyle::local(),
6971 )
6972 .unwrap(),
6973 false,
6974 None,
6975 )
6976 .unwrap(),
6977 cx
6978 )
6979 .await
6980 .unwrap()
6981 .is_empty(),
6982 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6983 );
6984}
6985
6986#[gpui::test]
6987async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6988 init_test(cx);
6989
6990 let search_query = "file";
6991
6992 let fs = FakeFs::new(cx.executor());
6993 fs.insert_tree(
6994 path!("/dir"),
6995 json!({
6996 "one.rs": r#"// Rust file one"#,
6997 "one.ts": r#"// TypeScript file one"#,
6998 "two.rs": r#"// Rust file two"#,
6999 "two.ts": r#"// TypeScript file two"#,
7000 }),
7001 )
7002 .await;
7003 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7004 assert!(
7005 search(
7006 &project,
7007 SearchQuery::text(
7008 search_query,
7009 false,
7010 true,
7011 false,
7012 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7013 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7014 false,
7015 None,
7016 )
7017 .unwrap(),
7018 cx
7019 )
7020 .await
7021 .unwrap()
7022 .is_empty(),
7023 "If both no exclusions and inclusions match, exclusions should win and return nothing"
7024 );
7025
7026 assert!(
7027 search(
7028 &project,
7029 SearchQuery::text(
7030 search_query,
7031 false,
7032 true,
7033 false,
7034 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7035 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7036 false,
7037 None,
7038 )
7039 .unwrap(),
7040 cx
7041 )
7042 .await
7043 .unwrap()
7044 .is_empty(),
7045 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
7046 );
7047
7048 assert!(
7049 search(
7050 &project,
7051 SearchQuery::text(
7052 search_query,
7053 false,
7054 true,
7055 false,
7056 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7057 .unwrap(),
7058 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7059 .unwrap(),
7060 false,
7061 None,
7062 )
7063 .unwrap(),
7064 cx
7065 )
7066 .await
7067 .unwrap()
7068 .is_empty(),
7069 "Non-matching inclusions and exclusions should not change that."
7070 );
7071
7072 assert_eq!(
7073 search(
7074 &project,
7075 SearchQuery::text(
7076 search_query,
7077 false,
7078 true,
7079 false,
7080 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7081 .unwrap(),
7082 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
7083 .unwrap(),
7084 false,
7085 None,
7086 )
7087 .unwrap(),
7088 cx
7089 )
7090 .await
7091 .unwrap(),
7092 HashMap::from_iter([
7093 (path!("dir/one.ts").to_string(), vec![14..18]),
7094 (path!("dir/two.ts").to_string(), vec![14..18]),
7095 ]),
7096 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
7097 );
7098}
7099
7100#[gpui::test]
7101async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
7102 init_test(cx);
7103
7104 let fs = FakeFs::new(cx.executor());
7105 fs.insert_tree(
7106 path!("/worktree-a"),
7107 json!({
7108 "haystack.rs": r#"// NEEDLE"#,
7109 "haystack.ts": r#"// NEEDLE"#,
7110 }),
7111 )
7112 .await;
7113 fs.insert_tree(
7114 path!("/worktree-b"),
7115 json!({
7116 "haystack.rs": r#"// NEEDLE"#,
7117 "haystack.ts": r#"// NEEDLE"#,
7118 }),
7119 )
7120 .await;
7121
7122 let path_style = PathStyle::local();
7123 let project = Project::test(
7124 fs.clone(),
7125 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
7126 cx,
7127 )
7128 .await;
7129
7130 assert_eq!(
7131 search(
7132 &project,
7133 SearchQuery::text(
7134 "NEEDLE",
7135 false,
7136 true,
7137 false,
7138 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
7139 Default::default(),
7140 true,
7141 None,
7142 )
7143 .unwrap(),
7144 cx
7145 )
7146 .await
7147 .unwrap(),
7148 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
7149 "should only return results from included worktree"
7150 );
7151 assert_eq!(
7152 search(
7153 &project,
7154 SearchQuery::text(
7155 "NEEDLE",
7156 false,
7157 true,
7158 false,
7159 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
7160 Default::default(),
7161 true,
7162 None,
7163 )
7164 .unwrap(),
7165 cx
7166 )
7167 .await
7168 .unwrap(),
7169 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
7170 "should only return results from included worktree"
7171 );
7172
7173 assert_eq!(
7174 search(
7175 &project,
7176 SearchQuery::text(
7177 "NEEDLE",
7178 false,
7179 true,
7180 false,
7181 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
7182 Default::default(),
7183 false,
7184 None,
7185 )
7186 .unwrap(),
7187 cx
7188 )
7189 .await
7190 .unwrap(),
7191 HashMap::from_iter([
7192 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
7193 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
7194 ]),
7195 "should return results from both worktrees"
7196 );
7197}
7198
7199#[gpui::test]
7200async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
7201 init_test(cx);
7202
7203 let fs = FakeFs::new(cx.background_executor.clone());
7204 fs.insert_tree(
7205 path!("/dir"),
7206 json!({
7207 ".git": {},
7208 ".gitignore": "**/target\n/node_modules\n",
7209 "target": {
7210 "index.txt": "index_key:index_value"
7211 },
7212 "node_modules": {
7213 "eslint": {
7214 "index.ts": "const eslint_key = 'eslint value'",
7215 "package.json": r#"{ "some_key": "some value" }"#,
7216 },
7217 "prettier": {
7218 "index.ts": "const prettier_key = 'prettier value'",
7219 "package.json": r#"{ "other_key": "other value" }"#,
7220 },
7221 },
7222 "package.json": r#"{ "main_key": "main value" }"#,
7223 }),
7224 )
7225 .await;
7226 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7227
7228 let query = "key";
7229 assert_eq!(
7230 search(
7231 &project,
7232 SearchQuery::text(
7233 query,
7234 false,
7235 false,
7236 false,
7237 Default::default(),
7238 Default::default(),
7239 false,
7240 None,
7241 )
7242 .unwrap(),
7243 cx
7244 )
7245 .await
7246 .unwrap(),
7247 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
7248 "Only one non-ignored file should have the query"
7249 );
7250
7251 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7252 let path_style = PathStyle::local();
7253 assert_eq!(
7254 search(
7255 &project,
7256 SearchQuery::text(
7257 query,
7258 false,
7259 false,
7260 true,
7261 Default::default(),
7262 Default::default(),
7263 false,
7264 None,
7265 )
7266 .unwrap(),
7267 cx
7268 )
7269 .await
7270 .unwrap(),
7271 HashMap::from_iter([
7272 (path!("dir/package.json").to_string(), vec![8..11]),
7273 (path!("dir/target/index.txt").to_string(), vec![6..9]),
7274 (
7275 path!("dir/node_modules/prettier/package.json").to_string(),
7276 vec![9..12]
7277 ),
7278 (
7279 path!("dir/node_modules/prettier/index.ts").to_string(),
7280 vec![15..18]
7281 ),
7282 (
7283 path!("dir/node_modules/eslint/index.ts").to_string(),
7284 vec![13..16]
7285 ),
7286 (
7287 path!("dir/node_modules/eslint/package.json").to_string(),
7288 vec![8..11]
7289 ),
7290 ]),
7291 "Unrestricted search with ignored directories should find every file with the query"
7292 );
7293
7294 let files_to_include =
7295 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
7296 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
7297 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7298 assert_eq!(
7299 search(
7300 &project,
7301 SearchQuery::text(
7302 query,
7303 false,
7304 false,
7305 true,
7306 files_to_include,
7307 files_to_exclude,
7308 false,
7309 None,
7310 )
7311 .unwrap(),
7312 cx
7313 )
7314 .await
7315 .unwrap(),
7316 HashMap::from_iter([(
7317 path!("dir/node_modules/prettier/package.json").to_string(),
7318 vec![9..12]
7319 )]),
7320 "With search including ignored prettier directory and excluding TS files, only one file should be found"
7321 );
7322}
7323
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    // Verifies searching for non-ASCII (Cyrillic) text. All match ranges
    // below are byte offsets: "привет"/"ПРИВЕТ" are 6 characters of 2 UTF-8
    // bytes each, i.e. 12 bytes.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // A case-sensitive unicode query stays a plain text query.
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    // Only the lowercase occurrences match: "one.rs" at byte 17 (after the
    // 3-byte "// ", the 12-byte "ПРИВЕТ", and "? ") and "three.rs" at byte 3.
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // A case-insensitive unicode query is represented as a regex query
    // (asserted here), unlike the case-sensitive one above.
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    // Now both cases match, including the uppercase "ПРИВЕТ" occurrences.
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing '.' is treated literally (not as a regex wildcard), so
    // only "two.rs" ("// ПРИВЕТ.") matches — 12 bytes of text plus 1 for '.'.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7406
7407#[gpui::test]
7408async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7409 init_test(cx);
7410
7411 let fs = FakeFs::new(cx.executor());
7412 fs.insert_tree(
7413 "/one/two",
7414 json!({
7415 "three": {
7416 "a.txt": "",
7417 "four": {}
7418 },
7419 "c.rs": ""
7420 }),
7421 )
7422 .await;
7423
7424 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7425 project
7426 .update(cx, |project, cx| {
7427 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7428 project.create_entry((id, rel_path("b..")), true, cx)
7429 })
7430 .await
7431 .unwrap()
7432 .into_included()
7433 .unwrap();
7434
7435 assert_eq!(
7436 fs.paths(true),
7437 vec![
7438 PathBuf::from(path!("/")),
7439 PathBuf::from(path!("/one")),
7440 PathBuf::from(path!("/one/two")),
7441 PathBuf::from(path!("/one/two/c.rs")),
7442 PathBuf::from(path!("/one/two/three")),
7443 PathBuf::from(path!("/one/two/three/a.txt")),
7444 PathBuf::from(path!("/one/two/three/b..")),
7445 PathBuf::from(path!("/one/two/three/four")),
7446 ]
7447 );
7448}
7449
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Verifies that a hover request fans out to every language server with
    // hover capabilities for the buffer, and that empty (None) responses and
    // capability-less servers are excluded from the merged result.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four servers for the same language: two that answer hovers, one that
    // answers with None, and one without the hover capability at all.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    // No hover capability: this server must never receive a
                    // hover request (enforced by the panicking handler below).
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each server BEFORE issuing the hover
    // request, keyed by server name so each is only registered once.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two answer with a hover string derived from their name.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Has the capability but answers None: should be queried yet
                // contribute nothing to the merged result.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Must never be asked; panic if the project routes a hover
                // request here despite the missing capability.
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Kick off the hover, then await the handler streams concurrently so we
    // know every capable server actually received a request.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned Some(hover) contribute results.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
7604
7605#[gpui::test]
7606async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
7607 init_test(cx);
7608
7609 let fs = FakeFs::new(cx.executor());
7610 fs.insert_tree(
7611 path!("/dir"),
7612 json!({
7613 "a.ts": "a",
7614 }),
7615 )
7616 .await;
7617
7618 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7619
7620 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7621 language_registry.add(typescript_lang());
7622 let mut fake_language_servers = language_registry.register_fake_lsp(
7623 "TypeScript",
7624 FakeLspAdapter {
7625 capabilities: lsp::ServerCapabilities {
7626 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7627 ..lsp::ServerCapabilities::default()
7628 },
7629 ..FakeLspAdapter::default()
7630 },
7631 );
7632
7633 let (buffer, _handle) = project
7634 .update(cx, |p, cx| {
7635 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7636 })
7637 .await
7638 .unwrap();
7639 cx.executor().run_until_parked();
7640
7641 let fake_server = fake_language_servers
7642 .next()
7643 .await
7644 .expect("failed to get the language server");
7645
7646 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7647 move |_, _| async move {
7648 Ok(Some(lsp::Hover {
7649 contents: lsp::HoverContents::Array(vec![
7650 lsp::MarkedString::String("".to_string()),
7651 lsp::MarkedString::String(" ".to_string()),
7652 lsp::MarkedString::String("\n\n\n".to_string()),
7653 ]),
7654 range: None,
7655 }))
7656 },
7657 );
7658
7659 let hover_task = project.update(cx, |project, cx| {
7660 project.hover(&buffer, Point::new(0, 0), cx)
7661 });
7662 let () = request_handled
7663 .next()
7664 .await
7665 .expect("All hover requests should have been triggered");
7666 assert_eq!(
7667 Vec::<String>::new(),
7668 hover_task
7669 .await
7670 .into_iter()
7671 .flatten()
7672 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
7673 .sorted()
7674 .collect::<Vec<_>>(),
7675 "Empty hover parts should be ignored"
7676 );
7677}
7678
7679#[gpui::test]
7680async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
7681 init_test(cx);
7682
7683 let fs = FakeFs::new(cx.executor());
7684 fs.insert_tree(
7685 path!("/dir"),
7686 json!({
7687 "a.ts": "a",
7688 }),
7689 )
7690 .await;
7691
7692 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7693
7694 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7695 language_registry.add(typescript_lang());
7696 let mut fake_language_servers = language_registry.register_fake_lsp(
7697 "TypeScript",
7698 FakeLspAdapter {
7699 capabilities: lsp::ServerCapabilities {
7700 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7701 ..lsp::ServerCapabilities::default()
7702 },
7703 ..FakeLspAdapter::default()
7704 },
7705 );
7706
7707 let (buffer, _handle) = project
7708 .update(cx, |p, cx| {
7709 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7710 })
7711 .await
7712 .unwrap();
7713 cx.executor().run_until_parked();
7714
7715 let fake_server = fake_language_servers
7716 .next()
7717 .await
7718 .expect("failed to get the language server");
7719
7720 let mut request_handled = fake_server
7721 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
7722 Ok(Some(vec![
7723 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7724 title: "organize imports".to_string(),
7725 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
7726 ..lsp::CodeAction::default()
7727 }),
7728 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7729 title: "fix code".to_string(),
7730 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
7731 ..lsp::CodeAction::default()
7732 }),
7733 ]))
7734 });
7735
7736 let code_actions_task = project.update(cx, |project, cx| {
7737 project.code_actions(
7738 &buffer,
7739 0..buffer.read(cx).len(),
7740 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
7741 cx,
7742 )
7743 });
7744
7745 let () = request_handled
7746 .next()
7747 .await
7748 .expect("The code action request should have been triggered");
7749
7750 let code_actions = code_actions_task.await.unwrap().unwrap();
7751 assert_eq!(code_actions.len(), 1);
7752 assert_eq!(
7753 code_actions[0].lsp_action.action_kind(),
7754 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
7755 );
7756}
7757
7758#[gpui::test]
7759async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
7760 init_test(cx);
7761
7762 let fs = FakeFs::new(cx.executor());
7763 fs.insert_tree(
7764 path!("/dir"),
7765 json!({
7766 "a.tsx": "a",
7767 }),
7768 )
7769 .await;
7770
7771 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7772
7773 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7774 language_registry.add(tsx_lang());
7775 let language_server_names = [
7776 "TypeScriptServer",
7777 "TailwindServer",
7778 "ESLintServer",
7779 "NoActionsCapabilitiesServer",
7780 ];
7781
7782 let mut language_server_rxs = [
7783 language_registry.register_fake_lsp(
7784 "tsx",
7785 FakeLspAdapter {
7786 name: language_server_names[0],
7787 capabilities: lsp::ServerCapabilities {
7788 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7789 ..lsp::ServerCapabilities::default()
7790 },
7791 ..FakeLspAdapter::default()
7792 },
7793 ),
7794 language_registry.register_fake_lsp(
7795 "tsx",
7796 FakeLspAdapter {
7797 name: language_server_names[1],
7798 capabilities: lsp::ServerCapabilities {
7799 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7800 ..lsp::ServerCapabilities::default()
7801 },
7802 ..FakeLspAdapter::default()
7803 },
7804 ),
7805 language_registry.register_fake_lsp(
7806 "tsx",
7807 FakeLspAdapter {
7808 name: language_server_names[2],
7809 capabilities: lsp::ServerCapabilities {
7810 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7811 ..lsp::ServerCapabilities::default()
7812 },
7813 ..FakeLspAdapter::default()
7814 },
7815 ),
7816 language_registry.register_fake_lsp(
7817 "tsx",
7818 FakeLspAdapter {
7819 name: language_server_names[3],
7820 capabilities: lsp::ServerCapabilities {
7821 code_action_provider: None,
7822 ..lsp::ServerCapabilities::default()
7823 },
7824 ..FakeLspAdapter::default()
7825 },
7826 ),
7827 ];
7828
7829 let (buffer, _handle) = project
7830 .update(cx, |p, cx| {
7831 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7832 })
7833 .await
7834 .unwrap();
7835 cx.executor().run_until_parked();
7836
7837 let mut servers_with_actions_requests = HashMap::default();
7838 for i in 0..language_server_names.len() {
7839 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
7840 panic!(
7841 "Failed to get language server #{i} with name {}",
7842 &language_server_names[i]
7843 )
7844 });
7845 let new_server_name = new_server.server.name();
7846
7847 assert!(
7848 !servers_with_actions_requests.contains_key(&new_server_name),
7849 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7850 );
7851 match new_server_name.0.as_ref() {
7852 "TailwindServer" | "TypeScriptServer" => {
7853 servers_with_actions_requests.insert(
7854 new_server_name.clone(),
7855 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7856 move |_, _| {
7857 let name = new_server_name.clone();
7858 async move {
7859 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
7860 lsp::CodeAction {
7861 title: format!("{name} code action"),
7862 ..lsp::CodeAction::default()
7863 },
7864 )]))
7865 }
7866 },
7867 ),
7868 );
7869 }
7870 "ESLintServer" => {
7871 servers_with_actions_requests.insert(
7872 new_server_name,
7873 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7874 |_, _| async move { Ok(None) },
7875 ),
7876 );
7877 }
7878 "NoActionsCapabilitiesServer" => {
7879 let _never_handled = new_server
7880 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7881 panic!(
7882 "Should not call for code actions server with no corresponding capabilities"
7883 )
7884 });
7885 }
7886 unexpected => panic!("Unexpected server name: {unexpected}"),
7887 }
7888 }
7889
7890 let code_actions_task = project.update(cx, |project, cx| {
7891 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
7892 });
7893
7894 // cx.run_until_parked();
7895 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
7896 |mut code_actions_request| async move {
7897 code_actions_request
7898 .next()
7899 .await
7900 .expect("All code actions requests should have been triggered")
7901 },
7902 ))
7903 .await;
7904 assert_eq!(
7905 vec!["TailwindServer code action", "TypeScriptServer code action"],
7906 code_actions_task
7907 .await
7908 .unwrap()
7909 .unwrap()
7910 .into_iter()
7911 .map(|code_action| code_action.lsp_action.title().to_owned())
7912 .sorted()
7913 .collect::<Vec<_>>(),
7914 "Should receive code actions responses from all related servers with hover capabilities"
7915 );
7916}
7917
7918#[gpui::test]
7919async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
7920 init_test(cx);
7921
7922 let fs = FakeFs::new(cx.executor());
7923 fs.insert_tree(
7924 "/dir",
7925 json!({
7926 "a.rs": "let a = 1;",
7927 "b.rs": "let b = 2;",
7928 "c.rs": "let c = 2;",
7929 }),
7930 )
7931 .await;
7932
7933 let project = Project::test(
7934 fs,
7935 [
7936 "/dir/a.rs".as_ref(),
7937 "/dir/b.rs".as_ref(),
7938 "/dir/c.rs".as_ref(),
7939 ],
7940 cx,
7941 )
7942 .await;
7943
7944 // check the initial state and get the worktrees
7945 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7946 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7947 assert_eq!(worktrees.len(), 3);
7948
7949 let worktree_a = worktrees[0].read(cx);
7950 let worktree_b = worktrees[1].read(cx);
7951 let worktree_c = worktrees[2].read(cx);
7952
7953 // check they start in the right order
7954 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7955 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7956 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7957
7958 (
7959 worktrees[0].clone(),
7960 worktrees[1].clone(),
7961 worktrees[2].clone(),
7962 )
7963 });
7964
7965 // move first worktree to after the second
7966 // [a, b, c] -> [b, a, c]
7967 project
7968 .update(cx, |project, cx| {
7969 let first = worktree_a.read(cx);
7970 let second = worktree_b.read(cx);
7971 project.move_worktree(first.id(), second.id(), cx)
7972 })
7973 .expect("moving first after second");
7974
7975 // check the state after moving
7976 project.update(cx, |project, cx| {
7977 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7978 assert_eq!(worktrees.len(), 3);
7979
7980 let first = worktrees[0].read(cx);
7981 let second = worktrees[1].read(cx);
7982 let third = worktrees[2].read(cx);
7983
7984 // check they are now in the right order
7985 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7986 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7987 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7988 });
7989
7990 // move the second worktree to before the first
7991 // [b, a, c] -> [a, b, c]
7992 project
7993 .update(cx, |project, cx| {
7994 let second = worktree_a.read(cx);
7995 let first = worktree_b.read(cx);
7996 project.move_worktree(first.id(), second.id(), cx)
7997 })
7998 .expect("moving second before first");
7999
8000 // check the state after moving
8001 project.update(cx, |project, cx| {
8002 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8003 assert_eq!(worktrees.len(), 3);
8004
8005 let first = worktrees[0].read(cx);
8006 let second = worktrees[1].read(cx);
8007 let third = worktrees[2].read(cx);
8008
8009 // check they are now in the right order
8010 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8011 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8012 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8013 });
8014
8015 // move the second worktree to after the third
8016 // [a, b, c] -> [a, c, b]
8017 project
8018 .update(cx, |project, cx| {
8019 let second = worktree_b.read(cx);
8020 let third = worktree_c.read(cx);
8021 project.move_worktree(second.id(), third.id(), cx)
8022 })
8023 .expect("moving second after third");
8024
8025 // check the state after moving
8026 project.update(cx, |project, cx| {
8027 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8028 assert_eq!(worktrees.len(), 3);
8029
8030 let first = worktrees[0].read(cx);
8031 let second = worktrees[1].read(cx);
8032 let third = worktrees[2].read(cx);
8033
8034 // check they are now in the right order
8035 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8036 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8037 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
8038 });
8039
8040 // move the third worktree to before the second
8041 // [a, c, b] -> [a, b, c]
8042 project
8043 .update(cx, |project, cx| {
8044 let third = worktree_c.read(cx);
8045 let second = worktree_b.read(cx);
8046 project.move_worktree(third.id(), second.id(), cx)
8047 })
8048 .expect("moving third before second");
8049
8050 // check the state after moving
8051 project.update(cx, |project, cx| {
8052 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8053 assert_eq!(worktrees.len(), 3);
8054
8055 let first = worktrees[0].read(cx);
8056 let second = worktrees[1].read(cx);
8057 let third = worktrees[2].read(cx);
8058
8059 // check they are now in the right order
8060 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8061 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8062 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8063 });
8064
8065 // move the first worktree to after the third
8066 // [a, b, c] -> [b, c, a]
8067 project
8068 .update(cx, |project, cx| {
8069 let first = worktree_a.read(cx);
8070 let third = worktree_c.read(cx);
8071 project.move_worktree(first.id(), third.id(), cx)
8072 })
8073 .expect("moving first after third");
8074
8075 // check the state after moving
8076 project.update(cx, |project, cx| {
8077 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8078 assert_eq!(worktrees.len(), 3);
8079
8080 let first = worktrees[0].read(cx);
8081 let second = worktrees[1].read(cx);
8082 let third = worktrees[2].read(cx);
8083
8084 // check they are now in the right order
8085 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8086 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8087 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
8088 });
8089
8090 // move the third worktree to before the first
8091 // [b, c, a] -> [a, b, c]
8092 project
8093 .update(cx, |project, cx| {
8094 let third = worktree_a.read(cx);
8095 let first = worktree_b.read(cx);
8096 project.move_worktree(third.id(), first.id(), cx)
8097 })
8098 .expect("moving third before first");
8099
8100 // check the state after moving
8101 project.update(cx, |project, cx| {
8102 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8103 assert_eq!(worktrees.len(), 3);
8104
8105 let first = worktrees[0].read(cx);
8106 let second = worktrees[1].read(cx);
8107 let third = worktrees[2].read(cx);
8108
8109 // check they are now in the right order
8110 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8111 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8112 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8113 });
8114}
8115
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The index (staged) contents differ from the working copy: the working
    // copy adds a comment line and edits the println message.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the background diff recalculation against the index text settle.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        // Working copy vs. index: one added line and one modified line.
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Rewrite the index so it matches the working copy except for the
    // println line, which it omits.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    // After the index change settles, only the println line is unstaged.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff
                .snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
8209
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three distinct versions of the file — HEAD, index, and working copy —
    // so the uncommitted diff (working copy vs. HEAD) contains hunks whose
    // secondary (staged/unstaged) status differs per hunk.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index, but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        // The comment line exists only in the working copy (unstaged
        // addition); the println change is also in the index (fully staged).
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as one deletion hunk; the file is still present
    // in the index, so the deletion itself is not yet staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file by removing it from the index.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk no longer has an unstaged (secondary) counterpart.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8393
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and working copy differ by one deletion ("zero") and two
    // modifications ("two"->"TWO", "four"->"FOUR"). The index starts equal to
    // HEAD, so all three hunks begin unstaged. The test then stages hunks one
    // at a time, observing the optimistic "pending" secondary status, the
    // emitted DiffChanged events, and the behavior when an index write fails.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so the emitted event sequence can be asserted.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The index write hasn't completed yet, so the staged hunk shows
        // SecondaryHunkRemovalPending rather than NoSecondaryHunk.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // Optimistic pending state appears even though the write will fail.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8743
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same three-hunk setup as `test_staging_hunks`, but here FakeFs events
    // are paused and flushed manually so that hunks are staged while earlier
    // index-write FS events are still in flight. All hunks must still end up
    // staged once every event is delivered.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Pending until the index-write FS event is delivered.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks remain pending; no FS event has landed yet.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8937
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; override via `OPERATIONS`.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines, every fifth line modified in the working copy — this yields
    // exactly 6 separate modification hunks against HEAD (asserted below).
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    // Randomly stage or unstage hunks, tracking the expected pending state in
    // our local `hunks` copy, with random yields in between so operations can
    // overlap arbitrarily with in-flight index writes.
    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, every pending transition must have resolved to
    // its final state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
9057
// Opening a single file (rather than its parent directory) as the worktree
// root should still produce an uncommitted diff for that buffer, computed
// against the content recorded for it in HEAD.
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index both hold the committed content, so the only
    // difference is in the working copy.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs", committed_contents.clone())],
    );

    // Note: the worktree root is the file itself, not "/dir".
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    // Expect a single modified hunk on the println! line; the hunk is still
    // unstaged (HasSecondaryHunk) because the index matches HEAD.
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &uncommitted_diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                DiffHunkStatus {
                    kind: DiffHunkStatusKind::Modified,
                    secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
                },
            )],
        );
    });
}
9131
9132// TODO: Should we test this on Windows also?
// Staging a hunk through Zed must not clobber the executable bit that the
// file already has in the index: after staging, `git diff --staged` should
// show no mode change and `git ls-files -s` should still report 100755.
// Uses the real filesystem and the real git CLI, hence unix-only.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` as an executable (0o755) file, then modify its contents
    // in the working copy so there is a hunk to stage.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk of the modified file.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // Verify via the git CLI that staging did not rewrite the file mode.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
9216
// `repository_and_path_for_project_path` should map each project path to the
// innermost enclosing repository (nested `deps/dep1/.git` wins over the outer
// `dir1/.git`), return None for files outside any repository, and stop
// resolving paths once their repository's `.git` directory is removed.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project path, expected (repo work dir, repo-relative path)) pairs;
        // None means the path belongs to no repository.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Deleting the outer repository's `.git` directory should make its files
    // resolve to no repository.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
9306
// When the home directory itself is a git repository, a project opened on a
// subfolder of home must NOT treat home as its repository; only a project
// opened on home itself should resolve files to that repository.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Case 1: worktree rooted at ~/project — the home repo should be ignored.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Case 2: worktree rooted at ~ — now the home repo applies.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
9364
// End-to-end check of `Repository::cached_status` against a real git repo:
// initial modified/added/deleted states are observed at startup, a later
// working-copy edit is picked up, and after committing everything, deleting
// a tracked file yields a Deleted status while deleting an untracked file
// yields no status at all.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // b.txt was never added, so it stays untracked.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Modifying a previously-unchanged file should surface a new entry.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Commit all outstanding changes so the working copy is clean again.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
                diff_stat: Some(DiffStat {
                    added: 0,
                    deleted: 1,
                }),
            }]
        );
    });
}
9520
// Post-processing of raw git statuses: a file deleted in the index but
// present in the working copy should be reported with a combined
// Deleted/Added ("DA") status, and a nested git repository directory must
// not appear in the outer repository's statuses at all.
// NOTE(review): currently `#[ignore]`d — presumably flaky or broken; confirm
// before relying on it.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer "project" repository (not the nested "sub" one).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
                diff_stat: None,
            }]
        )
    });
}
9585
9586#[track_caller]
9587/// We merge lhs into rhs.
9588fn merge_pending_ops_snapshots(
9589 source: Vec<pending_op::PendingOps>,
9590 mut target: Vec<pending_op::PendingOps>,
9591) -> Vec<pending_op::PendingOps> {
9592 for s_ops in source {
9593 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9594 if ops.repo_path == s_ops.repo_path {
9595 Some(idx)
9596 } else {
9597 None
9598 }
9599 }) {
9600 let t_ops = &mut target[idx];
9601 for s_op in s_ops.ops {
9602 if let Some(op_idx) = t_ops
9603 .ops
9604 .iter()
9605 .zip(0..)
9606 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9607 {
9608 let t_op = &mut t_ops.ops[op_idx];
9609 match (s_op.job_status, t_op.job_status) {
9610 (pending_op::JobStatus::Running, _) => {}
9611 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9612 (s_st, t_st) if s_st == t_st => {}
9613 _ => unreachable!(),
9614 }
9615 } else {
9616 t_ops.ops.push(s_op);
9617 }
9618 }
9619 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9620 } else {
9621 target.push(s_ops);
9622 }
9623 }
9624 target
9625}
9626
// Repeatedly staging and unstaging a single untracked file should produce a
// monotonically increasing sequence of pending ops, each observed first as
// Running (synchronously, right after issuing the request) and then as
// Finished. All PendingOpsChanged events are accumulated via
// `merge_pending_ops_snapshots` and checked at the end, along with the final
// cached git status (index: Added, worktree: Unmodified).
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into one merged SumTree.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Each stage/unstage request is expected to be assigned the next id.
    let mut id = 1u16;

    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            // Immediately after issuing the request the op is Running.
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        // Once the task resolves, the same op must be Finished.
        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event stream should contain all five ops, in order,
    // alternating Staged/Unstaged and all Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The last operation was a stage, so the file ends up Added in the index.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
9791
// When two stage requests for the same path are issued back-to-back (the
// first detached, the second awaited), the first op should be superseded and
// reported as Skipped while the second runs to Finished. The final cached
// status still shows the file staged (index: Added).
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into one merged SumTree.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage request: detached, never awaited.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second stage request for the same path: awaited with a timeout.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was superseded (Skipped); op 2 completed (Finished).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
9901
// `stage_all`/`unstage_all` should record pending ops per affected path:
// after staging one file explicitly, then stage_all, then unstage_all, each
// of the two untracked files ends with a Staged op and an Unstaged op (both
// Finished), and both files are back to Untracked in the cached status.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into one merged SumTree.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt alone, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: explicit stage (op 1) + unstage_all (op 2). stage_all did not
    // add an op for it, since it was already staged.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: stage_all (op 1) + unstage_all (op 2).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both files are untracked again.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
            ]
        );
    });
}
10032
// Opening a worktree rooted in a subfolder of a repository should still
// resolve the repository at its true root, and statuses for files inside the
// worktree should be observable (and cleared when the repo status is reset).
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the two files inside the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // The worktree root is two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clearing the repo's status should clear both paths.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
10112
10113// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Compiled out entirely via `#[cfg(any())]` (see flakiness note above):
// simulates a conflicted cherry-pick with real git and checks that
// `Repository::merge_conflicts` tracks the conflicted path, then clears once
// the cherry-pick is resolved and CHERRY_PICK_HEAD is removed.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create divergent edits to a.txt on two branches, then cherry-pick one
    // onto the other to force a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is in a conflicted cherry-pick state.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once resolved, the conflict set should be empty again.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
10195
// Rewriting .gitignore at runtime should flip which entries are ignored:
// after switching the pattern from *.txt to *.xml, the previously-ignored
// b.txt becomes visible (and Added once staged) while a.xml becomes ignored.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Roles have swapped: a.xml is now ignored, b.txt is staged as Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
10263
10264// NOTE:
10265// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
10266// a directory which some program has already open.
// This is a limitation of Windows.
10268// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
10269// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// Renaming a repository's work directory on disk should update the tracked
// `work_directory_abs_path` while preserving the per-file statuses
// (a: modified, b: untracked). Real fs + real git; skipped on Windows per
// the note above this test.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit `a`, leave `b` untracked, then modify `a` in the working copy.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: work dir is project1, a is modified, b is untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename; statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10345
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
//
// End-to-end check of git status tracking against a real repository: initial
// scan, working-copy edits, commits, resets/stash, gitignore changes, and
// directory renames.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so they are untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    // `status_for_path` returns None for clean (committed, unmodified) files.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files, and extend the gitignore to also cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A new untracked file inside a freshly created nested directory.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the parent directory; the untracked status should follow the
    // file to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10570
// Checks how FS events inside a gitignored directory are surfaced: creating
// and removing entries under an ignored-but-loaded dir should produce worktree
// entry updates for the tracked level, but no repository status updates.
// NOTE(review): this test is currently `#[ignore]`d; the reason is not
// documented here — presumably timing sensitivity, like the test below.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record repository updates and worktree entry updates for the assertions
    // below; "fs-event-sentinel" entries are test plumbing and are filtered.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Load a file inside the ignored dir so `project/target/...` gets
    // expanded into the worktree.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![RepositoryEvent::StatusesChanged,],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Simulate build churn inside the ignored dir: create a nested deps dir,
    // drop a temp file into it, then remove the whole dir again.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
10729
10730// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
10731// to different timings/ordering of events.
10732#[ignore]
10733#[gpui::test]
10734async fn test_odd_events_for_ignored_dirs(
10735 executor: BackgroundExecutor,
10736 cx: &mut gpui::TestAppContext,
10737) {
10738 init_test(cx);
10739 let fs = FakeFs::new(executor);
10740 fs.insert_tree(
10741 path!("/root"),
10742 json!({
10743 ".git": {},
10744 ".gitignore": "**/target/",
10745 "src": {
10746 "main.rs": "fn main() {}",
10747 },
10748 "target": {
10749 "debug": {
10750 "foo.txt": "foo",
10751 "deps": {}
10752 }
10753 }
10754 }),
10755 )
10756 .await;
10757 fs.set_head_and_index_for_repo(
10758 path!("/root/.git").as_ref(),
10759 &[
10760 (".gitignore", "**/target/".into()),
10761 ("src/main.rs", "fn main() {}".into()),
10762 ],
10763 );
10764
10765 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10766 let repository_updates = Arc::new(Mutex::new(Vec::new()));
10767 let project_events = Arc::new(Mutex::new(Vec::new()));
10768 project.update(cx, |project, cx| {
10769 let repository_updates = repository_updates.clone();
10770 cx.subscribe(project.git_store(), move |_, _, e, _| {
10771 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
10772 repository_updates.lock().push(e.clone());
10773 }
10774 })
10775 .detach();
10776 let project_events = project_events.clone();
10777 cx.subscribe_self(move |_, e, _| {
10778 if let Event::WorktreeUpdatedEntries(_, updates) = e {
10779 project_events.lock().extend(
10780 updates
10781 .iter()
10782 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
10783 .filter(|(path, _)| path != "fs-event-sentinel"),
10784 );
10785 }
10786 })
10787 .detach();
10788 });
10789
10790 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10791 tree.update(cx, |tree, cx| {
10792 tree.load_file(rel_path("target/debug/foo.txt"), cx)
10793 })
10794 .await
10795 .unwrap();
10796 tree.flush_fs_events(cx).await;
10797 project
10798 .update(cx, |project, cx| project.git_scans_complete(cx))
10799 .await;
10800 cx.run_until_parked();
10801 tree.update(cx, |tree, _| {
10802 assert_eq!(
10803 tree.entries(true, 0)
10804 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10805 .collect::<Vec<_>>(),
10806 vec![
10807 (rel_path(""), false),
10808 (rel_path(".gitignore"), false),
10809 (rel_path("src"), false),
10810 (rel_path("src/main.rs"), false),
10811 (rel_path("target"), true),
10812 (rel_path("target/debug"), true),
10813 (rel_path("target/debug/deps"), true),
10814 (rel_path("target/debug/foo.txt"), true),
10815 ]
10816 );
10817 });
10818
10819 assert_eq!(
10820 repository_updates.lock().drain(..).collect::<Vec<_>>(),
10821 vec![
10822 RepositoryEvent::BranchChanged,
10823 RepositoryEvent::StatusesChanged,
10824 RepositoryEvent::StatusesChanged,
10825 ],
10826 "Initial worktree scan should produce a repo update event"
10827 );
10828 assert_eq!(
10829 project_events.lock().drain(..).collect::<Vec<_>>(),
10830 vec![
10831 ("target".to_string(), PathChange::Loaded),
10832 ("target/debug".to_string(), PathChange::Loaded),
10833 ("target/debug/deps".to_string(), PathChange::Loaded),
10834 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
10835 ],
10836 "All non-ignored entries and all opened firs should be getting a project event",
10837 );
10838
10839 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
10840 // This may happen multiple times during a single flycheck, but once is enough for testing.
10841 fs.emit_fs_event("/root/target/debug/deps", None);
10842 tree.flush_fs_events(cx).await;
10843 project
10844 .update(cx, |project, cx| project.git_scans_complete(cx))
10845 .await;
10846 cx.executor().run_until_parked();
10847
10848 assert_eq!(
10849 repository_updates
10850 .lock()
10851 .iter()
10852 .cloned()
10853 .collect::<Vec<_>>(),
10854 Vec::new(),
10855 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
10856 );
10857 assert_eq!(
10858 project_events.lock().as_slice(),
10859 Vec::new(),
10860 "No further project events should happen, as only ignored dirs received FS events",
10861 );
10862}
10863
// Checks that adding an invisible worktree (created with `visible: false`,
// e.g. when opening a single file outside the project) does not surface extra
// repositories: only the repo of the visible worktree should be reported.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // Outer repo at dir1 contains an inner repo at dir1/dep1; the project
    // only opens dep1 as a visible worktree.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only the inner repository (containing the visible worktree) is reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Open a file from the outer repo as a non-visible worktree.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The outer /root/dir1 repository must still not be reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
10925
// Checks that gitignore state (entry.is_ignored plus git status) is tracked
// correctly across rescans, both for files ignored by the repo's own
// .gitignore and for files ignored by an ancestor .gitignore outside the repo.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file-scan exclusions so even normally-excluded entries (such as
    // `.git`, inspected by the final assertion) appear in the worktree.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // The outer .gitignore lives above the repo root ("tree") and ignores the
    // "ancestor-ignored-file*" names.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored dir's contents to be loaded so its entries exist.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: clean tracked file, ancestor-ignored file (not flagged
    // ignored here since the rule lives outside the repo), and ignored file.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Add new files in each of the three categories; stage the tracked one.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    // After the rescan: the staged file is Added, and the new files inherit
    // the same ignored/not-ignored state as their siblings.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git dir itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
11066
// Checks that linked git worktrees (`.git` file pointing at
// `.git/worktrees/<name>` with a `commondir` back-reference) and submodules
// (`.git` file pointing at `.git/modules/...`) are each discovered as their
// own repository, and that git-state changes in them are picked up.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        // Linked worktree's gitdir points back at the main .git.
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                // A `.git` *file* with a gitdir pointer, as git writes for
                // linked worktrees.
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories (main, linked worktree, submodule) are found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, not the
    // outer one; the barrier flushes pending repository work before asserting.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // On-disk content "B" differs from HEAD/index content "b" => modified.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
11222
// Opening two worktrees that live inside the same git repository should yield
// a single deduplicated repository entry, not one per worktree.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    // One repo at /root/project, with two sibling subdirectories.
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children as separate worktrees of the same project.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository, rooted at the shared parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
11269
// When a buffer is saved under a new path (save-as), its unstaged and
// uncommitted diffs must be rebased onto the *new* path's index/HEAD content,
// driven by the `BufferChangedFilePath` event.
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct marker contents so the assertions can tell which base text
    // (file 1 vs file 2, staged vs committed) a diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    // HEAD and index each carry different contents for both files.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Replace the whole buffer so there is guaranteed diff content.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // The uncommitted diff, opened after the rename, must compare against
    // file_2's HEAD (committed) content.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11383
11384async fn search(
11385 project: &Entity<Project>,
11386 query: SearchQuery,
11387 cx: &mut gpui::TestAppContext,
11388) -> Result<HashMap<String, Vec<Range<usize>>>> {
11389 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11390 let mut results = HashMap::default();
11391 while let Ok(search_result) = search_rx.rx.recv().await {
11392 match search_result {
11393 SearchResult::Buffer { buffer, ranges } => {
11394 results.entry(buffer).or_insert(ranges);
11395 }
11396 SearchResult::LimitReached => {}
11397 }
11398 }
11399 Ok(results
11400 .into_iter()
11401 .map(|(buffer, ranges)| {
11402 buffer.update(cx, |buffer, cx| {
11403 let path = buffer
11404 .file()
11405 .unwrap()
11406 .full_path(cx)
11407 .to_string_lossy()
11408 .to_string();
11409 let ranges = ranges
11410 .into_iter()
11411 .map(|range| range.to_offset(buffer))
11412 .collect::<Vec<_>>();
11413 (path, ranges)
11414 })
11415 })
11416 .collect())
11417}
11418
/// Verifies that reloading a buffer with a different character encoding is an
/// undoable operation: undo restores the original encoding and text, redo
/// re-applies the new encoding, and the buffer never becomes dirty (since
/// every state corresponds to the on-disk bytes).
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    // Bytes 0x48 0x69 decoded as one UTF-16LE code unit (0x6948) yield '楈'.
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    // Redo should re-apply the UTF-16LE reinterpretation.
    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11482
11483pub fn init_test(cx: &mut gpui::TestAppContext) {
11484 zlog::init_test();
11485
11486 cx.update(|cx| {
11487 let settings_store = SettingsStore::test(cx);
11488 cx.set_global(settings_store);
11489 release_channel::init(semver::Version::new(0, 0, 0), cx);
11490 });
11491}
11492
11493fn json_lang() -> Arc<Language> {
11494 Arc::new(Language::new(
11495 LanguageConfig {
11496 name: "JSON".into(),
11497 matcher: LanguageMatcher {
11498 path_suffixes: vec!["json".to_string()],
11499 ..Default::default()
11500 },
11501 ..Default::default()
11502 },
11503 None,
11504 ))
11505}
11506
11507fn js_lang() -> Arc<Language> {
11508 Arc::new(Language::new(
11509 LanguageConfig {
11510 name: "JavaScript".into(),
11511 matcher: LanguageMatcher {
11512 path_suffixes: vec!["js".to_string()],
11513 ..Default::default()
11514 },
11515 ..Default::default()
11516 },
11517 None,
11518 ))
11519}
11520
/// Builds a "Python" test language (no grammar) whose toolchain lister
/// reports a virtual environment for every ancestor of the queried subroot
/// path that contains a `.venv` directory on the provided [`FakeFs`].
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Fake `ToolchainLister` backed by the in-memory filesystem; only `list`
    // has meaningful behavior, `resolve` always fails.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is intentionally unimplemented for this fake.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for the fake toolchain.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
11595
11596fn typescript_lang() -> Arc<Language> {
11597 Arc::new(Language::new(
11598 LanguageConfig {
11599 name: "TypeScript".into(),
11600 matcher: LanguageMatcher {
11601 path_suffixes: vec!["ts".to_string()],
11602 ..Default::default()
11603 },
11604 ..Default::default()
11605 },
11606 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
11607 ))
11608}
11609
11610fn tsx_lang() -> Arc<Language> {
11611 Arc::new(Language::new(
11612 LanguageConfig {
11613 name: "tsx".into(),
11614 matcher: LanguageMatcher {
11615 path_suffixes: vec!["tsx".to_string()],
11616 ..Default::default()
11617 },
11618 ..Default::default()
11619 },
11620 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
11621 ))
11622}
11623
11624fn get_all_tasks(
11625 project: &Entity<Project>,
11626 task_contexts: Arc<TaskContexts>,
11627 cx: &mut App,
11628) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
11629 let new_tasks = project.update(cx, |project, cx| {
11630 project.task_store().update(cx, |task_store, cx| {
11631 task_store.task_inventory().unwrap().update(cx, |this, cx| {
11632 this.used_and_current_resolved_tasks(task_contexts, cx)
11633 })
11634 })
11635 });
11636
11637 cx.background_spawn(async move {
11638 let (mut old, new) = new_tasks.await;
11639 old.extend(new);
11640 old
11641 })
11642}
11643
11644#[track_caller]
11645fn assert_entry_git_state(
11646 tree: &Worktree,
11647 repository: &Repository,
11648 path: &str,
11649 index_status: Option<StatusCode>,
11650 is_ignored: bool,
11651) {
11652 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
11653 let entry = tree
11654 .entry_for_path(&rel_path(path))
11655 .unwrap_or_else(|| panic!("entry {path} not found"));
11656 let status = repository
11657 .status_for_path(&repo_path(path))
11658 .map(|entry| entry.status);
11659 let expected = index_status.map(|index_status| {
11660 TrackedStatus {
11661 index_status,
11662 worktree_status: StatusCode::Unmodified,
11663 }
11664 .into()
11665 });
11666 assert_eq!(
11667 status, expected,
11668 "expected {path} to have git status: {expected:?}"
11669 );
11670 assert_eq!(
11671 entry.is_ignored, is_ignored,
11672 "expected {path} to have is_ignored: {is_ignored}"
11673 );
11674}
11675
11676#[track_caller]
11677fn git_init(path: &Path) -> git2::Repository {
11678 let mut init_opts = RepositoryInitOptions::new();
11679 init_opts.initial_head("main");
11680 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
11681}
11682
11683#[track_caller]
11684fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
11685 let path = path.as_ref();
11686 let mut index = repo.index().expect("Failed to get index");
11687 index.add_path(path).expect("Failed to add file");
11688 index.write().expect("Failed to write index");
11689}
11690
11691#[track_caller]
11692fn git_remove_index(path: &Path, repo: &git2::Repository) {
11693 let mut index = repo.index().expect("Failed to get index");
11694 index.remove_path(path).expect("Failed to add file");
11695 index.write().expect("Failed to write index");
11696}
11697
11698#[track_caller]
11699fn git_commit(msg: &'static str, repo: &git2::Repository) {
11700 use git2::Signature;
11701
11702 let signature = Signature::now("test", "test@zed.dev").unwrap();
11703 let oid = repo.index().unwrap().write_tree().unwrap();
11704 let tree = repo.find_tree(oid).unwrap();
11705 if let Ok(head) = repo.head() {
11706 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
11707
11708 let parent_commit = parent_obj.as_commit().unwrap();
11709
11710 repo.commit(
11711 Some("HEAD"),
11712 &signature,
11713 &signature,
11714 msg,
11715 &tree,
11716 &[parent_commit],
11717 )
11718 .expect("Failed to commit with parent");
11719 } else {
11720 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
11721 .expect("Failed to commit");
11722 }
11723}
11724
/// Cherry-picks `commit` onto the current HEAD.
/// Currently compiled out via `#[cfg(any())]`; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
11730
11731#[track_caller]
11732fn git_stash(repo: &mut git2::Repository) {
11733 use git2::Signature;
11734
11735 let signature = Signature::now("test", "test@zed.dev").unwrap();
11736 repo.stash_save(&signature, "N/A", None)
11737 .expect("Failed to stash");
11738}
11739
11740#[track_caller]
11741fn git_reset(offset: usize, repo: &git2::Repository) {
11742 let head = repo.head().expect("Couldn't get repo head");
11743 let object = head.peel(git2::ObjectType::Commit).unwrap();
11744 let commit = object.as_commit().unwrap();
11745 let new_head = commit
11746 .parents()
11747 .inspect(|parnet| {
11748 parnet.message();
11749 })
11750 .nth(offset)
11751 .expect("Not enough history");
11752 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
11753 .expect("Could not reset");
11754}
11755
/// Creates a branch named `name` pointing at the current HEAD commit.
/// Currently compiled out via `#[cfg(any())]`; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-pasted panic message: this creates a branch, not a commit.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
11766
/// Points HEAD at the reference named `name` and updates the working tree to
/// match. Currently compiled out via `#[cfg(any())]`; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
11773
/// Snapshots the repository's current status as a map from path to git
/// status flags. Currently compiled out via `#[cfg(any())]`.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}
11783
/// Verifies `Project::find_project_path` with absolute paths across multiple
/// worktrees: existing files resolve to the right worktree and relative
/// path, nonexistent files inside a worktree still resolve, and paths
/// outside every worktree resolve to `None`.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two sibling directories opened as separate worktrees of one project.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path need not exist on disk to map into a worktree.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
11867
/// Verifies that removing worktrees keeps the git store consistent: a
/// worktree without its own repository can be removed without dropping any
/// repositories, and as repository-bearing worktrees are removed the active
/// repository falls back to the remaining ones, ending at `None`.
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Note: `/root/b/script` is a subdirectory of repo `b`, opened as its own
    // worktree alongside the two repo roots.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the `script` worktree must not remove repo `b`, which is
    // still covered by the `/root/b` worktree.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing `a` should make `b` the active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the last repository-bearing worktree leaves no active repo.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
11980
/// Verifies the optimistic UI for staging: while a stage operation is in
/// flight the hunk transitions `HasSecondaryHunk` →
/// `SecondaryHunkRemovalPending` → `NoSecondaryHunk`, and after the commit
/// (simulated by moving HEAD) the uncommitted diff is empty.
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // The stage operation is still in flight: the hunk must be in the
    // optimistic "removal pending" state.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
12125
/// Verifies that buffers whose paths match the `read_only_files` glob
/// patterns open as read-only, while non-matching files stay writable.
#[gpui::test]
async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Configure read_only_files setting
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/generated/**".to_string(),
                    "**/*.gen.rs".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
                "types.gen.rs": "// Generated file",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Open a regular file - should be read-write
    let regular_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    regular_buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Regular file should not be read-only");
    });

    // Open a file matching *.gen.rs pattern - should be read-only
    let gen_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
        })
        .await
        .unwrap();

    gen_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File matching *.gen.rs pattern should be read-only"
        );
    });

    // Open a file in generated directory - should be read-only
    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File in generated directory should be read-only"
        );
    });
}
12201
/// Verifies the default behavior: with `read_only_files` explicitly set to
/// an empty list, every buffer opens read-write.
#[gpui::test]
async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Explicitly set read_only_files to empty (default behavior)
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // All files should be read-write when read_only_files is empty
    let main_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    main_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Files should not be read-only when read_only_files is empty"
        );
    });

    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Generated files should not be read-only when read_only_files is empty"
        );
    });
}
12260
/// Verifies `read_only_files` with lock-file patterns: `Cargo.lock` and
/// `package-lock.json` open read-only while their manifest counterparts
/// (`Cargo.toml`, `package.json`) remain writable.
#[gpui::test]
async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Configure to make lock files read-only
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/*.lock".to_string(),
                    "**/package-lock.json".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "Cargo.lock": "# Lock file",
            "Cargo.toml": "[package]",
            "package-lock.json": "{}",
            "package.json": "{}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Cargo.lock should be read-only
    let cargo_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.lock"), cx)
        })
        .await
        .unwrap();

    cargo_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "Cargo.lock should be read-only");
    });

    // Cargo.toml should be read-write
    let cargo_toml = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    cargo_toml.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
    });

    // package-lock.json should be read-only
    let package_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package-lock.json"), cx)
        })
        .await
        .unwrap();

    package_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "package-lock.json should be read-only");
    });

    // package.json should be read-write
    let package_json = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package.json"), cx)
        })
        .await
        .unwrap();

    package_json.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "package.json should not be read-only");
    });
}
12339
/// Tests for the `disable_ai` setting, which is expected to behave as a
/// "saturating" boolean across settings layers: once any layer (global,
/// user, or project) sets it to true, lower-precedence layers cannot turn
/// AI back on.
mod disable_ai_settings_tests {
    use gpui::TestAppContext;
    use project::*;
    use settings::{Settings, SettingsStore};

    #[gpui::test]
    async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
        cx.update(|cx| {
            settings::init(cx);

            // Test 1: Default is false (AI enabled)
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Default should allow AI"
            );
        });

        let disable_true = serde_json::json!({
            "disable_ai": true
        })
        .to_string();
        let disable_false = serde_json::json!({
            "disable_ai": false
        })
        .to_string();

        // Global true must win over user false...
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_user_settings(&disable_false, cx).unwrap();
            store.set_global_settings(&disable_true, cx).unwrap();
        });
        cx.update(|cx| {
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Local false cannot override global true"
            );
        });

        // ...and user true must win over global false (saturating in both
        // directions).
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_global_settings(&disable_false, cx).unwrap();
            store.set_user_settings(&disable_true, cx).unwrap();
        });

        cx.update(|cx| {
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Local false cannot override global true"
            );
        });
    }

    #[gpui::test]
    async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
        use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
        use worktree::WorktreeId;

        cx.update(|cx| {
            settings::init(cx);

            // Default should allow AI
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Default should allow AI"
            );
        });

        let worktree_id = WorktreeId::from_usize(1);
        // Helper to build an Arc<RelPath> from a unix-style path literal.
        let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
            std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
        };
        let project_path = rel_path("project");
        let settings_location = SettingsLocation {
            worktree_id,
            path: project_path.as_ref(),
        };

        // Test: Project-level disable_ai=true should disable AI for files in that project
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": true }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                settings.disable_ai,
                "Project-level disable_ai=true should disable AI for files in that project"
            );
            // Global should now also be true since project-level disable_ai is merged into global
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Global setting should be affected by project-level disable_ai=true"
            );
        });

        // Test: Setting project-level to false should allow AI for that project
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": false }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                !settings.disable_ai,
                "Project-level disable_ai=false should allow AI"
            );
            // Global should also be false now
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Global setting should be false when project-level is false"
            );
        });

        // Test: User-level true + project-level false = AI disabled (saturation)
        let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_user_settings(&disable_true, cx).unwrap();
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": false }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                settings.disable_ai,
                "Project-level false cannot override user-level true (SaturatingBool)"
            );
        });
    }
}