1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::{FakeFs, PathEventKind};
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 TestAppContext, UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
45 LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
46 ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
47 language_settings::{LanguageSettings, LanguageSettingsContent},
48 markdown_lang, rust_lang, tree_sitter_typescript,
49};
50use lsp::{
51 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
52 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
53 Uri, WillRenameFiles, notification::DidRenameFiles,
54};
55use parking_lot::Mutex;
56use paths::{config_dir, global_gitignore_path, tasks_file};
57use postage::stream::Stream as _;
58use pretty_assertions::{assert_eq, assert_matches};
59use project::{
60 Event, TaskContexts,
61 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
62 search::{SearchQuery, SearchResult},
63 task_store::{TaskSettingsLocation, TaskStore},
64 *,
65};
66use rand::{Rng as _, rngs::StdRng};
67use serde_json::json;
68use settings::SettingsStore;
69#[cfg(not(windows))]
70use std::os;
71use std::{
72 cell::RefCell,
73 env, mem,
74 num::NonZeroU32,
75 ops::Range,
76 path::{Path, PathBuf},
77 rc::Rc,
78 str::FromStr,
79 sync::{Arc, OnceLock, atomic},
80 task::Poll,
81 time::Duration,
82};
83use sum_tree::SumTree;
84use task::{ResolvedTask, ShellKind, TaskContext};
85use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
86use unindent::Unindent as _;
87use util::{
88 TryFutureExt as _, assert_set_eq, maybe, path,
89 paths::{PathMatcher, PathStyle},
90 rel_path::{RelPath, rel_path},
91 test::{TempTree, marked_text_offsets},
92 uri,
93};
94use worktree::WorktreeModelHandle as _;
95
96#[gpui::test]
97async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
98 cx.executor().allow_parking();
99
100 let (tx, mut rx) = futures::channel::mpsc::unbounded();
101 let _thread = std::thread::spawn(move || {
102 #[cfg(not(target_os = "windows"))]
103 std::fs::metadata("/tmp").unwrap();
104 #[cfg(target_os = "windows")]
105 std::fs::metadata("C:/Windows").unwrap();
106 std::thread::sleep(Duration::from_millis(1000));
107 tx.unbounded_send(1).unwrap();
108 });
109 rx.next().await.unwrap();
110}
111
112#[gpui::test]
113async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
114 cx.executor().allow_parking();
115
116 let io_task = smol::unblock(move || {
117 println!("sleeping on thread {:?}", std::thread::current().id());
118 std::thread::sleep(Duration::from_millis(10));
119 1
120 });
121
122 let task = cx.foreground_executor().spawn(async move {
123 io_task.await;
124 });
125
126 task.await;
127}
128
129#[gpui::test]
130async fn test_default_session_work_dirs_prefers_directory_worktrees_over_single_file_parents(
131 cx: &mut gpui::TestAppContext,
132) {
133 init_test(cx);
134
135 let fs = FakeFs::new(cx.executor());
136 fs.insert_tree(
137 path!("/root"),
138 json!({
139 "dir-project": {
140 "src": {
141 "main.rs": "fn main() {}"
142 }
143 },
144 "single-file.rs": "fn helper() {}"
145 }),
146 )
147 .await;
148
149 let project = Project::test(
150 fs,
151 [
152 Path::new(path!("/root/single-file.rs")),
153 Path::new(path!("/root/dir-project")),
154 ],
155 cx,
156 )
157 .await;
158
159 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
160 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
161
162 assert_eq!(
163 ordered_paths,
164 vec![
165 PathBuf::from(path!("/root/dir-project")),
166 PathBuf::from(path!("/root")),
167 ]
168 );
169}
170
171#[gpui::test]
172async fn test_default_session_work_dirs_falls_back_to_home_for_empty_project(
173 cx: &mut gpui::TestAppContext,
174) {
175 init_test(cx);
176
177 let fs = FakeFs::new(cx.executor());
178 let project = Project::test(fs, [], cx).await;
179
180 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
181 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
182
183 assert_eq!(ordered_paths, vec![paths::home_dir().to_path_buf()]);
184}
185
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are opt-in by the user, and thus
// we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // This test creates real symlinks on disk, so blocking filesystem work
    // must be allowed to park the executor.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // Symlink to the tree root itself — the worktree is opened through it...
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    // ...and a symlink inside the tree pointing at a sibling directory.
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // apple, banana/carrot/{date,endive}, fennel/grape, plus grape again
        // reached via the `finnochio` symlink: 5 file entries total.
        assert_eq!(tree.file_count(), 5);
        // The symlinked path resolves to the same underlying file (same inode).
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
236
237#[gpui::test]
238async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
239 init_test(cx);
240
241 let dir = TempTree::new(json!({
242 ".editorconfig": r#"
243 root = true
244 [*.rs]
245 indent_style = tab
246 indent_size = 3
247 end_of_line = lf
248 insert_final_newline = true
249 trim_trailing_whitespace = true
250 max_line_length = 120
251 [*.js]
252 tab_width = 10
253 max_line_length = off
254 "#,
255 ".zed": {
256 "settings.json": r#"{
257 "tab_size": 8,
258 "hard_tabs": false,
259 "ensure_final_newline_on_save": false,
260 "remove_trailing_whitespace_on_save": false,
261 "preferred_line_length": 64,
262 "soft_wrap": "editor_width",
263 }"#,
264 },
265 "a.rs": "fn a() {\n A\n}",
266 "b": {
267 ".editorconfig": r#"
268 [*.rs]
269 indent_size = 2
270 max_line_length = off,
271 "#,
272 "b.rs": "fn b() {\n B\n}",
273 },
274 "c.js": "def c\n C\nend",
275 "d": {
276 ".editorconfig": r#"
277 [*.rs]
278 indent_size = 1
279 "#,
280 "d.rs": "fn d() {\n D\n}",
281 },
282 "README.json": "tabs are better\n",
283 }));
284
285 let path = dir.path();
286 let fs = FakeFs::new(cx.executor());
287 fs.insert_tree_from_real_fs(path, path).await;
288 let project = Project::test(fs, [path], cx).await;
289
290 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
291 language_registry.add(js_lang());
292 language_registry.add(json_lang());
293 language_registry.add(rust_lang());
294
295 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
296
297 cx.executor().run_until_parked();
298
299 let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
300 let buffer = project
301 .update(cx, |project, cx| {
302 project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
303 })
304 .await
305 .unwrap();
306 cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
307 };
308
309 let settings_a = settings_for("a.rs", cx).await;
310 let settings_b = settings_for("b/b.rs", cx).await;
311 let settings_c = settings_for("c.js", cx).await;
312 let settings_d = settings_for("d/d.rs", cx).await;
313 let settings_readme = settings_for("README.json", cx).await;
314 // .editorconfig overrides .zed/settings
315 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
316 assert_eq!(settings_a.hard_tabs, true);
317 assert_eq!(settings_a.ensure_final_newline_on_save, true);
318 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
319 assert_eq!(settings_a.preferred_line_length, 120);
320
321 // .editorconfig in b/ overrides .editorconfig in root
322 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
323
324 // .editorconfig in subdirectory overrides .editorconfig in root
325 assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));
326
327 // "indent_size" is not set, so "tab_width" is used
328 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
329
330 // When max_line_length is "off", default to .zed/settings.json
331 assert_eq!(settings_b.preferred_line_length, 64);
332 assert_eq!(settings_c.preferred_line_length, 64);
333
334 // README.md should not be affected by .editorconfig's globe "*.rs"
335 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
336}
337
338#[gpui::test]
339async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
340 init_test(cx);
341
342 let fs = FakeFs::new(cx.executor());
343 fs.insert_tree(
344 path!("/grandparent"),
345 json!({
346 ".editorconfig": "[*]\nindent_size = 4\n",
347 "parent": {
348 ".editorconfig": "[*.rs]\nindent_size = 2\n",
349 "worktree": {
350 ".editorconfig": "[*.md]\nindent_size = 3\n",
351 "main.rs": "fn main() {}",
352 "README.md": "# README",
353 "other.txt": "other content",
354 }
355 }
356 }),
357 )
358 .await;
359
360 let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;
361
362 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
363 language_registry.add(rust_lang());
364 language_registry.add(markdown_lang());
365
366 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
367
368 cx.executor().run_until_parked();
369 let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
370 let buffer = project
371 .update(cx, |project, cx| {
372 project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
373 })
374 .await
375 .unwrap();
376 cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
377 };
378
379 let settings_rs = settings_for("main.rs", cx).await;
380 let settings_md = settings_for("README.md", cx).await;
381 let settings_txt = settings_for("other.txt", cx).await;
382
383 // main.rs gets indent_size = 2 from parent's external .editorconfig
384 assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));
385
386 // README.md gets indent_size = 3 from internal worktree .editorconfig
387 assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));
388
389 // other.txt gets indent_size = 4 from grandparent's external .editorconfig
390 assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
391}
392
393#[gpui::test]
394async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
395 init_test(cx);
396
397 let fs = FakeFs::new(cx.executor());
398 fs.insert_tree(
399 path!("/worktree"),
400 json!({
401 ".editorconfig": "[*]\nindent_size = 99\n",
402 "src": {
403 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
404 "file.rs": "fn main() {}",
405 }
406 }),
407 )
408 .await;
409
410 let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;
411
412 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
413 language_registry.add(rust_lang());
414
415 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
416
417 cx.executor().run_until_parked();
418
419 let buffer = project
420 .update(cx, |project, cx| {
421 project.open_buffer((worktree.read(cx).id(), rel_path("src/file.rs")), cx)
422 })
423 .await
424 .unwrap();
425 cx.update(|cx| {
426 let settings = LanguageSettings::for_buffer(buffer.read(cx), cx).into_owned();
427 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
428 });
429}
430
431#[gpui::test]
432async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
433 init_test(cx);
434
435 let fs = FakeFs::new(cx.executor());
436 fs.insert_tree(
437 path!("/parent"),
438 json!({
439 ".editorconfig": "[*]\nindent_size = 99\n",
440 "worktree": {
441 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
442 "file.rs": "fn main() {}",
443 }
444 }),
445 )
446 .await;
447
448 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
449
450 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
451 language_registry.add(rust_lang());
452
453 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
454
455 cx.executor().run_until_parked();
456
457 let buffer = project
458 .update(cx, |project, cx| {
459 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
460 })
461 .await
462 .unwrap();
463
464 cx.update(|cx| {
465 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
466
467 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
468 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
469 });
470}
471
#[gpui::test]
async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The worktree itself has no .editorconfig; its parent has one marked
    // `root = true`, and the grandparent has another with a different value.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "parent": {
                ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
                "worktree": {
                    "file.rs": "fn main() {}",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
514
515#[gpui::test]
516async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
517 init_test(cx);
518
519 let fs = FakeFs::new(cx.executor());
520 fs.insert_tree(
521 path!("/parent"),
522 json!({
523 ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
524 "worktree_a": {
525 "file.rs": "fn a() {}",
526 ".editorconfig": "[*]\ninsert_final_newline = true\n",
527 },
528 "worktree_b": {
529 "file.rs": "fn b() {}",
530 ".editorconfig": "[*]\ninsert_final_newline = false\n",
531 }
532 }),
533 )
534 .await;
535
536 let project = Project::test(
537 fs,
538 [
539 path!("/parent/worktree_a").as_ref(),
540 path!("/parent/worktree_b").as_ref(),
541 ],
542 cx,
543 )
544 .await;
545
546 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
547 language_registry.add(rust_lang());
548
549 cx.executor().run_until_parked();
550
551 let worktrees: Vec<_> = cx.update(|cx| project.read(cx).worktrees(cx).collect());
552 assert_eq!(worktrees.len(), 2);
553
554 for worktree in worktrees {
555 let buffer = project
556 .update(cx, |project, cx| {
557 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
558 })
559 .await
560 .unwrap();
561
562 cx.update(|cx| {
563 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
564
565 // Both worktrees should get indent_size = 5 from shared parent .editorconfig
566 assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
567 });
568 }
569}
570
#[gpui::test]
async fn test_external_editorconfig_not_loaded_without_internal_config(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    // The worktree contains no .editorconfig of its own; only the parent
    // directory (outside the worktree) has one.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "worktree": {
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
        // because without an internal .editorconfig, external configs are not loaded
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
613
614#[gpui::test]
615async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
616 init_test(cx);
617
618 let fs = FakeFs::new(cx.executor());
619 fs.insert_tree(
620 path!("/parent"),
621 json!({
622 ".editorconfig": "[*]\nindent_size = 4\n",
623 "worktree": {
624 ".editorconfig": "[*]\n",
625 "file.rs": "fn main() {}",
626 }
627 }),
628 )
629 .await;
630
631 let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;
632
633 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
634 language_registry.add(rust_lang());
635
636 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
637
638 cx.executor().run_until_parked();
639
640 let buffer = project
641 .update(cx, |project, cx| {
642 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
643 })
644 .await
645 .unwrap();
646
647 cx.update(|cx| {
648 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
649
650 // Test initial settings: tab_size = 4 from parent's external .editorconfig
651 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
652 });
653
654 fs.atomic_write(
655 PathBuf::from(path!("/parent/.editorconfig")),
656 "[*]\nindent_size = 8\n".to_owned(),
657 )
658 .await
659 .unwrap();
660
661 cx.executor().run_until_parked();
662
663 let buffer = project
664 .update(cx, |project, cx| {
665 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
666 })
667 .await
668 .unwrap();
669
670 cx.update(|cx| {
671 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
672
673 // Test settings updated: tab_size = 8
674 assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
675 });
676}
677
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Both directories sit under a parent whose .editorconfig (root = true,
    // indent_size = 7) lives outside either worktree.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Add a second worktree after the project already exists; its external
    // configs should be discovered at that point.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((new_worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
744
745#[gpui::test]
746async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
747 init_test(cx);
748
749 let fs = FakeFs::new(cx.executor());
750 fs.insert_tree(
751 path!("/parent"),
752 json!({
753 ".editorconfig": "[*]\nindent_size = 6\n",
754 "worktree": {
755 ".editorconfig": "[*]\n",
756 "file.rs": "fn main() {}",
757 }
758 }),
759 )
760 .await;
761
762 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
763
764 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
765 language_registry.add(rust_lang());
766
767 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
768 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
769
770 cx.executor().run_until_parked();
771
772 cx.update(|cx| {
773 let store = cx.global::<SettingsStore>();
774 let (worktree_ids, external_paths, watcher_paths) =
775 store.editorconfig_store.read(cx).test_state();
776
777 // Test external config is loaded
778 assert!(worktree_ids.contains(&worktree_id));
779 assert!(!external_paths.is_empty());
780 assert!(!watcher_paths.is_empty());
781 });
782
783 project.update(cx, |project, cx| {
784 project.remove_worktree(worktree_id, cx);
785 });
786
787 cx.executor().run_until_parked();
788
789 cx.update(|cx| {
790 let store = cx.global::<SettingsStore>();
791 let (worktree_ids, external_paths, watcher_paths) =
792 store.editorconfig_store.read(cx).test_state();
793
794 // Test worktree state, external configs, and watchers all removed
795 assert!(!worktree_ids.contains(&worktree_id));
796 assert!(external_paths.is_empty());
797 assert!(watcher_paths.is_empty());
798 });
799}
800
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    // Two sibling worktrees share a single external .editorconfig located in
    // their common parent directory.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    // Remove only one of the two worktrees that reference the shared config.
    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_b.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
893
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Register the global hosting-provider registry and the built-in providers
    // before the project settings are loaded.
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare a custom provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // Loading the project settings should have registered the custom provider.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the setting on disk...
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    // ...which should unregister the provider again.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
958
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Layout: root `.zed` (tab_size 8, "cargo check all" task) and a nested
    // `b/.zed` (tab_size 2, "cargo check" task) that overrides it under `b/`.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve all tasks against the single worktree's context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Identifies tasks coming from the top-level `.zed/tasks.json`.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("a/a.rs")), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("b/b.rs")), cx)
        })
        .await
        .unwrap();
    // Settings: `a/` inherits the root tab_size; `b/` is overridden locally.
    cx.update(|cx| {
        let settings_a = LanguageSettings::for_buffer(&buffer_a.read(cx), cx);
        let settings_b = LanguageSettings::for_buffer(&buffer_b.read(cx), cx);

        assert_eq!(settings_a.tab_size.get(), 8);
        assert_eq!(settings_b.tab_size.get(), 2);
    });

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files contribute; the `b/.zed` task sorts first here.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the top-level task as scheduled and register a global tasks file.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store()
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // After scheduling, the top-level task sorts first; the newly-added global
    // task (with its env var) appears last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
1161
1162#[gpui::test]
1163async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
1164 init_test(cx);
1165 TaskStore::init(None);
1166
1167 // We need to start with a valid `.zed/tasks.json` file as otherwise the
1168 // event is emitted before we havd a chance to setup the event subscription.
1169 let fs = FakeFs::new(cx.executor());
1170 fs.insert_tree(
1171 path!("/dir"),
1172 json!({
1173 ".zed": {
1174 "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
1175 },
1176 "file.rs": ""
1177 }),
1178 )
1179 .await;
1180
1181 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1182 let saw_toast = Rc::new(RefCell::new(false));
1183
1184 // Update the `.zed/tasks.json` file with an invalid variable, so we can
1185 // later assert that the `Event::Toast` even is emitted.
1186 fs.save(
1187 path!("/dir/.zed/tasks.json").as_ref(),
1188 &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
1189 Default::default(),
1190 )
1191 .await
1192 .unwrap();
1193
1194 project.update(cx, |_, cx| {
1195 let saw_toast = saw_toast.clone();
1196
1197 cx.subscribe(&project, move |_, _, event: &Event, _| match event {
1198 Event::Toast {
1199 notification_id,
1200 message,
1201 link: Some(ToastLink { url, .. }),
1202 } => {
1203 assert!(notification_id.starts_with("local-tasks-"));
1204 assert!(message.contains("ZED_FOO"));
1205 assert_eq!(*url, "https://zed.dev/docs/tasks");
1206 *saw_toast.borrow_mut() = true;
1207 }
1208 _ => {}
1209 })
1210 .detach();
1211 });
1212
1213 cx.run_until_parked();
1214 assert!(
1215 *saw_toast.borrow(),
1216 "Expected `Event::Toast` was never emitted"
1217 );
1218}
1219
1220#[gpui::test]
1221async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
1222 init_test(cx);
1223 TaskStore::init(None);
1224
1225 let fs = FakeFs::new(cx.executor());
1226 fs.insert_tree(
1227 path!("/dir"),
1228 json!({
1229 ".zed": {
1230 "tasks.json": r#"[{
1231 "label": "test worktree root",
1232 "command": "echo $ZED_WORKTREE_ROOT"
1233 }]"#,
1234 },
1235 "a": {
1236 "a.rs": "fn a() {\n A\n}"
1237 },
1238 }),
1239 )
1240 .await;
1241
1242 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1243 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1244
1245 cx.executor().run_until_parked();
1246 let worktree_id = cx.update(|cx| {
1247 project.update(cx, |project, cx| {
1248 project.worktrees(cx).next().unwrap().read(cx).id()
1249 })
1250 });
1251
1252 let active_non_worktree_item_tasks = cx
1253 .update(|cx| {
1254 get_all_tasks(
1255 &project,
1256 Arc::new(TaskContexts {
1257 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1258 active_worktree_context: None,
1259 other_worktree_contexts: Vec::new(),
1260 lsp_task_sources: HashMap::default(),
1261 latest_selection: None,
1262 }),
1263 cx,
1264 )
1265 })
1266 .await;
1267 assert!(
1268 active_non_worktree_item_tasks.is_empty(),
1269 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
1270 );
1271
1272 let active_worktree_tasks = cx
1273 .update(|cx| {
1274 get_all_tasks(
1275 &project,
1276 Arc::new(TaskContexts {
1277 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
1278 active_worktree_context: Some((worktree_id, {
1279 let mut worktree_context = TaskContext::default();
1280 worktree_context
1281 .task_variables
1282 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
1283 worktree_context
1284 })),
1285 other_worktree_contexts: Vec::new(),
1286 lsp_task_sources: HashMap::default(),
1287 latest_selection: None,
1288 }),
1289 cx,
1290 )
1291 })
1292 .await;
1293 assert_eq!(
1294 active_worktree_tasks
1295 .into_iter()
1296 .map(|(source_kind, task)| {
1297 let resolved = task.resolved;
1298 (source_kind, resolved.command.unwrap())
1299 })
1300 .collect::<Vec<_>>(),
1301 vec![(
1302 TaskSourceKind::Worktree {
1303 id: worktree_id,
1304 directory_in_worktree: rel_path(".zed").into(),
1305 id_base: "local worktree tasks from directory \".zed\"".into(),
1306 },
1307 "echo /dir".to_string(),
1308 )]
1309 );
1310}
1311
// One worktree, two Python subprojects sharing one server ("ty"): both should
// initially attach to the same server instance; selecting a different
// toolchain for one subproject should spawn a second, separate instance.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: a directory containing `pyproject.toml` is a
    // project root.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walk up at most `depth` ancestors of `path` and return the first
        // directory that contains a `pyproject.toml` file.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // A buffer in project-b should reuse the same server instance (id 0).
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain has been activated yet for project-b.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b's root.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1513
// End-to-end check of language-server lifecycle management: server startup on
// buffer open, capability-driven buffer configuration, routing of
// change/save/rename/close notifications to the matching servers, diagnostics
// clearing on language change, and server restarts.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake servers: one for Rust, one for JSON, each advertising distinct
    // completion triggers so we can tell which server configured a buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer's
    // language (and therefore server) changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the replacements
    // come up.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1916
// Checks how configured language-server binary paths are resolved: a path that
// resolves inside the worktree is treated as worktree-relative, while one that
// does not is passed through unchanged (to be found via PATH at spawn time).
#[gpui::test]
async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let settings_json_contents = json!({
        "languages": {
            "Rust": {
                "language_servers": ["my_fake_lsp", "lsp_on_path"]
            }
        },
        "lsp": {
            "my_fake_lsp": {
                "binary": {
                    // file exists, so this is treated as a relative path
                    "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
                }
            },
            "lsp_on_path": {
                "binary": {
                    // file doesn't exist, so it will fall back on PATH env var
                    "path": path!("lsp_on_path.exe").to_string(),
                }
            }
        },
    });

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": settings_json_contents.to_string(),
            },
            ".relative_path": {
                "to": {
                    // NOTE(review): the settings above reference
                    // `my_fake_lsp_binary.exe`, but this tree creates
                    // `my_fake_lsp.exe` — confirm whether the relative-path
                    // resolution actually requires this file to exist.
                    "my_fake_lsp.exe": "",
                },
            },
            "src": {
                "main.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let mut my_fake_lsp = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "my_fake_lsp",
            ..Default::default()
        },
    );
    let mut lsp_on_path = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "lsp_on_path",
            ..Default::default()
        },
    );

    cx.run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    // The relative path is resolved against the worktree root.
    let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
    assert_eq!(
        lsp_path.to_string_lossy(),
        path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
    );

    // The non-resolving path is left as-is for PATH lookup.
    let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
    assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
}
2000
2001#[gpui::test]
2002async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2003 init_test(cx);
2004
2005 let settings_json_contents = json!({
2006 "languages": {
2007 "Rust": {
2008 "language_servers": ["tilde_lsp"]
2009 }
2010 },
2011 "lsp": {
2012 "tilde_lsp": {
2013 "binary": {
2014 "path": "~/.local/bin/rust-analyzer",
2015 }
2016 }
2017 },
2018 });
2019
2020 let fs = FakeFs::new(cx.executor());
2021 fs.insert_tree(
2022 path!("/root"),
2023 json!({
2024 ".zed": {
2025 "settings.json": settings_json_contents.to_string(),
2026 },
2027 "src": {
2028 "main.rs": "fn main() {}",
2029 }
2030 }),
2031 )
2032 .await;
2033
2034 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2035 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2036 language_registry.add(rust_lang());
2037
2038 let mut tilde_lsp = language_registry.register_fake_lsp(
2039 "Rust",
2040 FakeLspAdapter {
2041 name: "tilde_lsp",
2042 ..Default::default()
2043 },
2044 );
2045 cx.run_until_parked();
2046
2047 project
2048 .update(cx, |project, cx| {
2049 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2050 })
2051 .await
2052 .unwrap();
2053
2054 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2055 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2056 assert_eq!(
2057 lsp_path, expected_path,
2058 "Tilde path should expand to home directory"
2059 );
2060}
2061
// A filesystem rescan event for a watched file should be forwarded to the
// language server as a `CHANGED` file event (not created/deleted).
#[gpui::test]
async fn test_rescan_fs_change_is_reported_to_language_servers_as_changed(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, _lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a matching extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Have the server register a watch for Cargo.lock, then record every file
    // event Zed forwards to it.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(
            lsp::RegistrationParams {
                registrations: vec![lsp::Registration {
                    id: Default::default(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: serde_json::to_value(
                        lsp::DidChangeWatchedFilesRegistrationOptions {
                            watchers: vec![lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            }],
                        },
                    )
                    .ok(),
                }],
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
        }
    });

    // No events should have been delivered before the rescan is emitted.
    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);

    fs.emit_fs_event(path!("/the-root/Cargo.lock"), Some(PathEventKind::Rescan));
    cx.executor().run_until_parked();

    // The rescan surfaces as a single CHANGED event for the watched path.
    assert_eq!(
        &*file_changes.lock(),
        &[lsp::FileEvent {
            uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
            typ: lsp::FileChangeType::CHANGED,
        }]
    );
}
2152
2153#[gpui::test]
2154async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2155 init_test(cx);
2156
2157 let fs = FakeFs::new(cx.executor());
2158 fs.insert_tree(
2159 path!("/the-root"),
2160 json!({
2161 ".gitignore": "target\n",
2162 "Cargo.lock": "",
2163 "src": {
2164 "a.rs": "",
2165 "b.rs": "",
2166 },
2167 "target": {
2168 "x": {
2169 "out": {
2170 "x.rs": ""
2171 }
2172 },
2173 "y": {
2174 "out": {
2175 "y.rs": "",
2176 }
2177 },
2178 "z": {
2179 "out": {
2180 "z.rs": ""
2181 }
2182 }
2183 }
2184 }),
2185 )
2186 .await;
2187 fs.insert_tree(
2188 path!("/the-registry"),
2189 json!({
2190 "dep1": {
2191 "src": {
2192 "dep1.rs": "",
2193 }
2194 },
2195 "dep2": {
2196 "src": {
2197 "dep2.rs": "",
2198 }
2199 },
2200 }),
2201 )
2202 .await;
2203 fs.insert_tree(
2204 path!("/the/stdlib"),
2205 json!({
2206 "LICENSE": "",
2207 "src": {
2208 "string.rs": "",
2209 }
2210 }),
2211 )
2212 .await;
2213
2214 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2215 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2216 (project.languages().clone(), project.lsp_store())
2217 });
2218 language_registry.add(rust_lang());
2219 let mut fake_servers = language_registry.register_fake_lsp(
2220 "Rust",
2221 FakeLspAdapter {
2222 name: "the-language-server",
2223 ..Default::default()
2224 },
2225 );
2226
2227 cx.executor().run_until_parked();
2228
2229 // Start the language server by opening a buffer with a compatible file extension.
2230 project
2231 .update(cx, |project, cx| {
2232 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2233 })
2234 .await
2235 .unwrap();
2236
2237 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2238 project.update(cx, |project, cx| {
2239 let worktree = project.worktrees(cx).next().unwrap();
2240 assert_eq!(
2241 worktree
2242 .read(cx)
2243 .snapshot()
2244 .entries(true, 0)
2245 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2246 .collect::<Vec<_>>(),
2247 &[
2248 ("", false),
2249 (".gitignore", false),
2250 ("Cargo.lock", false),
2251 ("src", false),
2252 ("src/a.rs", false),
2253 ("src/b.rs", false),
2254 ("target", true),
2255 ]
2256 );
2257 });
2258
2259 let prev_read_dir_count = fs.read_dir_call_count();
2260
2261 let fake_server = fake_servers.next().await.unwrap();
2262 cx.executor().run_until_parked();
2263 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2264 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2265 id
2266 });
2267
2268 // Simulate jumping to a definition in a dependency outside of the worktree.
2269 let _out_of_worktree_buffer = project
2270 .update(cx, |project, cx| {
2271 project.open_local_buffer_via_lsp(
2272 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2273 server_id,
2274 cx,
2275 )
2276 })
2277 .await
2278 .unwrap();
2279
2280 // Keep track of the FS events reported to the language server.
2281 let file_changes = Arc::new(Mutex::new(Vec::new()));
2282 fake_server
2283 .request::<lsp::request::RegisterCapability>(
2284 lsp::RegistrationParams {
2285 registrations: vec![lsp::Registration {
2286 id: Default::default(),
2287 method: "workspace/didChangeWatchedFiles".to_string(),
2288 register_options: serde_json::to_value(
2289 lsp::DidChangeWatchedFilesRegistrationOptions {
2290 watchers: vec![
2291 lsp::FileSystemWatcher {
2292 glob_pattern: lsp::GlobPattern::String(
2293 path!("/the-root/Cargo.toml").to_string(),
2294 ),
2295 kind: None,
2296 },
2297 lsp::FileSystemWatcher {
2298 glob_pattern: lsp::GlobPattern::String(
2299 path!("/the-root/src/*.{rs,c}").to_string(),
2300 ),
2301 kind: None,
2302 },
2303 lsp::FileSystemWatcher {
2304 glob_pattern: lsp::GlobPattern::String(
2305 path!("/the-root/target/y/**/*.rs").to_string(),
2306 ),
2307 kind: None,
2308 },
2309 lsp::FileSystemWatcher {
2310 glob_pattern: lsp::GlobPattern::String(
2311 path!("/the/stdlib/src/**/*.rs").to_string(),
2312 ),
2313 kind: None,
2314 },
2315 lsp::FileSystemWatcher {
2316 glob_pattern: lsp::GlobPattern::String(
2317 path!("**/Cargo.lock").to_string(),
2318 ),
2319 kind: None,
2320 },
2321 ],
2322 },
2323 )
2324 .ok(),
2325 }],
2326 },
2327 DEFAULT_LSP_REQUEST_TIMEOUT,
2328 )
2329 .await
2330 .into_response()
2331 .unwrap();
2332 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2333 let file_changes = file_changes.clone();
2334 move |params, _| {
2335 let mut file_changes = file_changes.lock();
2336 file_changes.extend(params.changes);
2337 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2338 }
2339 });
2340
2341 cx.executor().run_until_parked();
2342 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2343 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2344
2345 let mut new_watched_paths = fs.watched_paths();
2346 new_watched_paths.retain(|path| {
2347 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2348 });
2349 assert_eq!(
2350 &new_watched_paths,
2351 &[
2352 Path::new(path!("/the-root")),
2353 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2354 Path::new(path!("/the/stdlib/src"))
2355 ]
2356 );
2357
2358 // Now the language server has asked us to watch an ignored directory path,
2359 // so we recursively load it.
2360 project.update(cx, |project, cx| {
2361 let worktree = project.visible_worktrees(cx).next().unwrap();
2362 assert_eq!(
2363 worktree
2364 .read(cx)
2365 .snapshot()
2366 .entries(true, 0)
2367 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2368 .collect::<Vec<_>>(),
2369 &[
2370 ("", false),
2371 (".gitignore", false),
2372 ("Cargo.lock", false),
2373 ("src", false),
2374 ("src/a.rs", false),
2375 ("src/b.rs", false),
2376 ("target", true),
2377 ("target/x", true),
2378 ("target/y", true),
2379 ("target/y/out", true),
2380 ("target/y/out/y.rs", true),
2381 ("target/z", true),
2382 ]
2383 );
2384 });
2385
2386 // Perform some file system mutations, two of which match the watched patterns,
2387 // and one of which does not.
2388 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2389 .await
2390 .unwrap();
2391 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2392 .await
2393 .unwrap();
2394 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2395 .await
2396 .unwrap();
2397 fs.create_file(
2398 path!("/the-root/target/x/out/x2.rs").as_ref(),
2399 Default::default(),
2400 )
2401 .await
2402 .unwrap();
2403 fs.create_file(
2404 path!("/the-root/target/y/out/y2.rs").as_ref(),
2405 Default::default(),
2406 )
2407 .await
2408 .unwrap();
2409 fs.save(
2410 path!("/the-root/Cargo.lock").as_ref(),
2411 &"".into(),
2412 Default::default(),
2413 )
2414 .await
2415 .unwrap();
2416 fs.save(
2417 path!("/the-stdlib/LICENSE").as_ref(),
2418 &"".into(),
2419 Default::default(),
2420 )
2421 .await
2422 .unwrap();
2423 fs.save(
2424 path!("/the/stdlib/src/string.rs").as_ref(),
2425 &"".into(),
2426 Default::default(),
2427 )
2428 .await
2429 .unwrap();
2430
2431 // The language server receives events for the FS mutations that match its watch patterns.
2432 cx.executor().run_until_parked();
2433 assert_eq!(
2434 &*file_changes.lock(),
2435 &[
2436 lsp::FileEvent {
2437 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2438 typ: lsp::FileChangeType::CHANGED,
2439 },
2440 lsp::FileEvent {
2441 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2442 typ: lsp::FileChangeType::DELETED,
2443 },
2444 lsp::FileEvent {
2445 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2446 typ: lsp::FileChangeType::CREATED,
2447 },
2448 lsp::FileEvent {
2449 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2450 typ: lsp::FileChangeType::CREATED,
2451 },
2452 lsp::FileEvent {
2453 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2454 typ: lsp::FileChangeType::CHANGED,
2455 },
2456 ]
2457 );
2458}
2459
2460#[gpui::test]
2461async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2462 init_test(cx);
2463
2464 let fs = FakeFs::new(cx.executor());
2465 fs.insert_tree(
2466 path!("/dir"),
2467 json!({
2468 "a.rs": "let a = 1;",
2469 "b.rs": "let b = 2;"
2470 }),
2471 )
2472 .await;
2473
2474 let project = Project::test(
2475 fs,
2476 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2477 cx,
2478 )
2479 .await;
2480 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2481
2482 let buffer_a = project
2483 .update(cx, |project, cx| {
2484 project.open_local_buffer(path!("/dir/a.rs"), cx)
2485 })
2486 .await
2487 .unwrap();
2488 let buffer_b = project
2489 .update(cx, |project, cx| {
2490 project.open_local_buffer(path!("/dir/b.rs"), cx)
2491 })
2492 .await
2493 .unwrap();
2494
2495 lsp_store.update(cx, |lsp_store, cx| {
2496 lsp_store
2497 .update_diagnostics(
2498 LanguageServerId(0),
2499 lsp::PublishDiagnosticsParams {
2500 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2501 version: None,
2502 diagnostics: vec![lsp::Diagnostic {
2503 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2504 severity: Some(lsp::DiagnosticSeverity::ERROR),
2505 message: "error 1".to_string(),
2506 ..Default::default()
2507 }],
2508 },
2509 None,
2510 DiagnosticSourceKind::Pushed,
2511 &[],
2512 cx,
2513 )
2514 .unwrap();
2515 lsp_store
2516 .update_diagnostics(
2517 LanguageServerId(0),
2518 lsp::PublishDiagnosticsParams {
2519 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2520 version: None,
2521 diagnostics: vec![lsp::Diagnostic {
2522 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2523 severity: Some(DiagnosticSeverity::WARNING),
2524 message: "error 2".to_string(),
2525 ..Default::default()
2526 }],
2527 },
2528 None,
2529 DiagnosticSourceKind::Pushed,
2530 &[],
2531 cx,
2532 )
2533 .unwrap();
2534 });
2535
2536 buffer_a.update(cx, |buffer, _| {
2537 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2538 assert_eq!(
2539 chunks
2540 .iter()
2541 .map(|(s, d)| (s.as_str(), *d))
2542 .collect::<Vec<_>>(),
2543 &[
2544 ("let ", None),
2545 ("a", Some(DiagnosticSeverity::ERROR)),
2546 (" = 1;", None),
2547 ]
2548 );
2549 });
2550 buffer_b.update(cx, |buffer, _| {
2551 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2552 assert_eq!(
2553 chunks
2554 .iter()
2555 .map(|(s, d)| (s.as_str(), *d))
2556 .collect::<Vec<_>>(),
2557 &[
2558 ("let ", None),
2559 ("b", Some(DiagnosticSeverity::WARNING)),
2560 (" = 2;", None),
2561 ]
2562 );
2563 });
2564}
2565
2566#[gpui::test]
2567async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2568 init_test(cx);
2569
2570 let fs = FakeFs::new(cx.executor());
2571 fs.insert_tree(
2572 path!("/root"),
2573 json!({
2574 "dir": {
2575 ".git": {
2576 "HEAD": "ref: refs/heads/main",
2577 },
2578 ".gitignore": "b.rs",
2579 "a.rs": "let a = 1;",
2580 "b.rs": "let b = 2;",
2581 },
2582 "other.rs": "let b = c;"
2583 }),
2584 )
2585 .await;
2586
2587 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2588 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2589 let (worktree, _) = project
2590 .update(cx, |project, cx| {
2591 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2592 })
2593 .await
2594 .unwrap();
2595 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2596
2597 let (worktree, _) = project
2598 .update(cx, |project, cx| {
2599 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2600 })
2601 .await
2602 .unwrap();
2603 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2604
2605 let server_id = LanguageServerId(0);
2606 lsp_store.update(cx, |lsp_store, cx| {
2607 lsp_store
2608 .update_diagnostics(
2609 server_id,
2610 lsp::PublishDiagnosticsParams {
2611 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2612 version: None,
2613 diagnostics: vec![lsp::Diagnostic {
2614 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2615 severity: Some(lsp::DiagnosticSeverity::ERROR),
2616 message: "unused variable 'b'".to_string(),
2617 ..Default::default()
2618 }],
2619 },
2620 None,
2621 DiagnosticSourceKind::Pushed,
2622 &[],
2623 cx,
2624 )
2625 .unwrap();
2626 lsp_store
2627 .update_diagnostics(
2628 server_id,
2629 lsp::PublishDiagnosticsParams {
2630 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2631 version: None,
2632 diagnostics: vec![lsp::Diagnostic {
2633 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2634 severity: Some(lsp::DiagnosticSeverity::ERROR),
2635 message: "unknown variable 'c'".to_string(),
2636 ..Default::default()
2637 }],
2638 },
2639 None,
2640 DiagnosticSourceKind::Pushed,
2641 &[],
2642 cx,
2643 )
2644 .unwrap();
2645 });
2646
2647 let main_ignored_buffer = project
2648 .update(cx, |project, cx| {
2649 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2650 })
2651 .await
2652 .unwrap();
2653 main_ignored_buffer.update(cx, |buffer, _| {
2654 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2655 assert_eq!(
2656 chunks
2657 .iter()
2658 .map(|(s, d)| (s.as_str(), *d))
2659 .collect::<Vec<_>>(),
2660 &[
2661 ("let ", None),
2662 ("b", Some(DiagnosticSeverity::ERROR)),
2663 (" = 2;", None),
2664 ],
2665 "Gigitnored buffers should still get in-buffer diagnostics",
2666 );
2667 });
2668 let other_buffer = project
2669 .update(cx, |project, cx| {
2670 project.open_buffer((other_worktree_id, rel_path("")), cx)
2671 })
2672 .await
2673 .unwrap();
2674 other_buffer.update(cx, |buffer, _| {
2675 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2676 assert_eq!(
2677 chunks
2678 .iter()
2679 .map(|(s, d)| (s.as_str(), *d))
2680 .collect::<Vec<_>>(),
2681 &[
2682 ("let b = ", None),
2683 ("c", Some(DiagnosticSeverity::ERROR)),
2684 (";", None),
2685 ],
2686 "Buffers from hidden projects should still get in-buffer diagnostics"
2687 );
2688 });
2689
2690 project.update(cx, |project, cx| {
2691 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2692 assert_eq!(
2693 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2694 vec![(
2695 ProjectPath {
2696 worktree_id: main_worktree_id,
2697 path: rel_path("b.rs").into(),
2698 },
2699 server_id,
2700 DiagnosticSummary {
2701 error_count: 1,
2702 warning_count: 0,
2703 }
2704 )]
2705 );
2706 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2707 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2708 });
2709}
2710
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies the project event sequence produced when a language server
    // reports disk-based diagnostics under its configured progress token:
    // Started -> DiagnosticsUpdated -> Finished, and that redundant empty
    // publishes do not produce duplicate update events.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter is configured so progress under `progress_token` counts as
    // disk-based diagnostics work.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Record project events from this point on, so their order can be asserted.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Progress started under the disk-based token surfaces as a
    // DiskBasedDiagnosticsStarted event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for an unopened file still updates its path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the buffer afterwards shows the previously-published diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second empty publish is a no-op: no further event is emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2846
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server while its disk-based
    // diagnostics are still in progress does not leave the project stuck in
    // the "diagnostics running" state: the new server's progress lifecycle
    // fully supersedes the old one's.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed, then the new one (id 1) is added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The already-open buffer is re-registered with the new server instance.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2948
2949#[gpui::test]
2950async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2951 init_test(cx);
2952
2953 let fs = FakeFs::new(cx.executor());
2954 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2955
2956 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2957
2958 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2959 language_registry.add(rust_lang());
2960 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2961
2962 let (buffer, _) = project
2963 .update(cx, |project, cx| {
2964 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2965 })
2966 .await
2967 .unwrap();
2968
2969 // Publish diagnostics
2970 let fake_server = fake_servers.next().await.unwrap();
2971 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2972 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2973 version: None,
2974 diagnostics: vec![lsp::Diagnostic {
2975 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2976 severity: Some(lsp::DiagnosticSeverity::ERROR),
2977 message: "the message".to_string(),
2978 ..Default::default()
2979 }],
2980 });
2981
2982 cx.executor().run_until_parked();
2983 buffer.update(cx, |buffer, _| {
2984 assert_eq!(
2985 buffer
2986 .snapshot()
2987 .diagnostics_in_range::<_, usize>(0..1, false)
2988 .map(|entry| entry.diagnostic.message.clone())
2989 .collect::<Vec<_>>(),
2990 ["the message".to_string()]
2991 );
2992 });
2993 project.update(cx, |project, cx| {
2994 assert_eq!(
2995 project.diagnostic_summary(false, cx),
2996 DiagnosticSummary {
2997 error_count: 1,
2998 warning_count: 0,
2999 }
3000 );
3001 });
3002
3003 project.update(cx, |project, cx| {
3004 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3005 });
3006
3007 // The diagnostics are cleared.
3008 cx.executor().run_until_parked();
3009 buffer.update(cx, |buffer, _| {
3010 assert_eq!(
3011 buffer
3012 .snapshot()
3013 .diagnostics_in_range::<_, usize>(0..1, false)
3014 .map(|entry| entry.diagnostic.message.clone())
3015 .collect::<Vec<_>>(),
3016 Vec::<String>::new(),
3017 );
3018 });
3019 project.update(cx, |project, cx| {
3020 assert_eq!(
3021 project.diagnostic_summary(false, cx),
3022 DiagnosticSummary {
3023 error_count: 0,
3024 warning_count: 0,
3025 }
3026 );
3027 });
3028}
3029
3030#[gpui::test]
3031async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
3032 init_test(cx);
3033
3034 let fs = FakeFs::new(cx.executor());
3035 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
3036
3037 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3038 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3039
3040 language_registry.add(rust_lang());
3041 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3042
3043 let (buffer, _handle) = project
3044 .update(cx, |project, cx| {
3045 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3046 })
3047 .await
3048 .unwrap();
3049
3050 // Before restarting the server, report diagnostics with an unknown buffer version.
3051 let fake_server = fake_servers.next().await.unwrap();
3052 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3053 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3054 version: Some(10000),
3055 diagnostics: Vec::new(),
3056 });
3057 cx.executor().run_until_parked();
3058 project.update(cx, |project, cx| {
3059 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3060 });
3061
3062 let mut fake_server = fake_servers.next().await.unwrap();
3063 let notification = fake_server
3064 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3065 .await
3066 .text_document;
3067 assert_eq!(notification.version, 0);
3068}
3069
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Verifies that cancelling language-server work for a buffer sends a
    // `window/workDoneProgress/cancel` only for progress that was started as
    // cancellable, not for non-cancellable progress.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First progress: NOT cancellable — a cancel request must not target it.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second progress: cancellable — this is the one that should be cancelled.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Only the cancellable progress token receives a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3141
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Verifies that toggling `enable_language_server` per language starts and
    // stops only the matching server: disabling Rust stops the Rust server but
    // not the JavaScript one, and re-enabling Rust restarts it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening a buffer of each language starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server receives an Exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3259
3260#[gpui::test(iterations = 3)]
3261async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3262 init_test(cx);
3263
3264 let text = "
3265 fn a() { A }
3266 fn b() { BB }
3267 fn c() { CCC }
3268 "
3269 .unindent();
3270
3271 let fs = FakeFs::new(cx.executor());
3272 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3273
3274 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3275 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3276
3277 language_registry.add(rust_lang());
3278 let mut fake_servers = language_registry.register_fake_lsp(
3279 "Rust",
3280 FakeLspAdapter {
3281 disk_based_diagnostics_sources: vec!["disk".into()],
3282 ..Default::default()
3283 },
3284 );
3285
3286 let buffer = project
3287 .update(cx, |project, cx| {
3288 project.open_local_buffer(path!("/dir/a.rs"), cx)
3289 })
3290 .await
3291 .unwrap();
3292
3293 let _handle = project.update(cx, |project, cx| {
3294 project.register_buffer_with_language_servers(&buffer, cx)
3295 });
3296
3297 let mut fake_server = fake_servers.next().await.unwrap();
3298 let open_notification = fake_server
3299 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3300 .await;
3301
3302 // Edit the buffer, moving the content down
3303 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3304 let change_notification_1 = fake_server
3305 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3306 .await;
3307 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3308
3309 // Report some diagnostics for the initial version of the buffer
3310 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3311 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3312 version: Some(open_notification.text_document.version),
3313 diagnostics: vec![
3314 lsp::Diagnostic {
3315 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3316 severity: Some(DiagnosticSeverity::ERROR),
3317 message: "undefined variable 'A'".to_string(),
3318 source: Some("disk".to_string()),
3319 ..Default::default()
3320 },
3321 lsp::Diagnostic {
3322 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3323 severity: Some(DiagnosticSeverity::ERROR),
3324 message: "undefined variable 'BB'".to_string(),
3325 source: Some("disk".to_string()),
3326 ..Default::default()
3327 },
3328 lsp::Diagnostic {
3329 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3330 severity: Some(DiagnosticSeverity::ERROR),
3331 source: Some("disk".to_string()),
3332 message: "undefined variable 'CCC'".to_string(),
3333 ..Default::default()
3334 },
3335 ],
3336 });
3337
3338 // The diagnostics have moved down since they were created.
3339 cx.executor().run_until_parked();
3340 buffer.update(cx, |buffer, _| {
3341 assert_eq!(
3342 buffer
3343 .snapshot()
3344 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3345 .collect::<Vec<_>>(),
3346 &[
3347 DiagnosticEntry {
3348 range: Point::new(3, 9)..Point::new(3, 11),
3349 diagnostic: Diagnostic {
3350 source: Some("disk".into()),
3351 severity: DiagnosticSeverity::ERROR,
3352 message: "undefined variable 'BB'".to_string(),
3353 is_disk_based: true,
3354 group_id: 1,
3355 is_primary: true,
3356 source_kind: DiagnosticSourceKind::Pushed,
3357 ..Diagnostic::default()
3358 },
3359 },
3360 DiagnosticEntry {
3361 range: Point::new(4, 9)..Point::new(4, 12),
3362 diagnostic: Diagnostic {
3363 source: Some("disk".into()),
3364 severity: DiagnosticSeverity::ERROR,
3365 message: "undefined variable 'CCC'".to_string(),
3366 is_disk_based: true,
3367 group_id: 2,
3368 is_primary: true,
3369 source_kind: DiagnosticSourceKind::Pushed,
3370 ..Diagnostic::default()
3371 }
3372 }
3373 ]
3374 );
3375 assert_eq!(
3376 chunks_with_diagnostics(buffer, 0..buffer.len()),
3377 [
3378 ("\n\nfn a() { ".to_string(), None),
3379 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3380 (" }\nfn b() { ".to_string(), None),
3381 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3382 (" }\nfn c() { ".to_string(), None),
3383 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3384 (" }\n".to_string(), None),
3385 ]
3386 );
3387 assert_eq!(
3388 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3389 [
3390 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3391 (" }\nfn c() { ".to_string(), None),
3392 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3393 ]
3394 );
3395 });
3396
3397 // Ensure overlapping diagnostics are highlighted correctly.
3398 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3399 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3400 version: Some(open_notification.text_document.version),
3401 diagnostics: vec![
3402 lsp::Diagnostic {
3403 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3404 severity: Some(DiagnosticSeverity::ERROR),
3405 message: "undefined variable 'A'".to_string(),
3406 source: Some("disk".to_string()),
3407 ..Default::default()
3408 },
3409 lsp::Diagnostic {
3410 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3411 severity: Some(DiagnosticSeverity::WARNING),
3412 message: "unreachable statement".to_string(),
3413 source: Some("disk".to_string()),
3414 ..Default::default()
3415 },
3416 ],
3417 });
3418
3419 cx.executor().run_until_parked();
3420 buffer.update(cx, |buffer, _| {
3421 assert_eq!(
3422 buffer
3423 .snapshot()
3424 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3425 .collect::<Vec<_>>(),
3426 &[
3427 DiagnosticEntry {
3428 range: Point::new(2, 9)..Point::new(2, 12),
3429 diagnostic: Diagnostic {
3430 source: Some("disk".into()),
3431 severity: DiagnosticSeverity::WARNING,
3432 message: "unreachable statement".to_string(),
3433 is_disk_based: true,
3434 group_id: 4,
3435 is_primary: true,
3436 source_kind: DiagnosticSourceKind::Pushed,
3437 ..Diagnostic::default()
3438 }
3439 },
3440 DiagnosticEntry {
3441 range: Point::new(2, 9)..Point::new(2, 10),
3442 diagnostic: Diagnostic {
3443 source: Some("disk".into()),
3444 severity: DiagnosticSeverity::ERROR,
3445 message: "undefined variable 'A'".to_string(),
3446 is_disk_based: true,
3447 group_id: 3,
3448 is_primary: true,
3449 source_kind: DiagnosticSourceKind::Pushed,
3450 ..Diagnostic::default()
3451 },
3452 }
3453 ]
3454 );
3455 assert_eq!(
3456 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3457 [
3458 ("fn a() { ".to_string(), None),
3459 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3460 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3461 ("\n".to_string(), None),
3462 ]
3463 );
3464 assert_eq!(
3465 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3466 [
3467 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3468 ("\n".to_string(), None),
3469 ]
3470 );
3471 });
3472
3473 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3474 // changes since the last save.
3475 buffer.update(cx, |buffer, cx| {
3476 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3477 buffer.edit(
3478 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3479 None,
3480 cx,
3481 );
3482 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3483 });
3484 let change_notification_2 = fake_server
3485 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3486 .await;
3487 assert!(
3488 change_notification_2.text_document.version > change_notification_1.text_document.version
3489 );
3490
3491 // Handle out-of-order diagnostics
3492 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3493 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3494 version: Some(change_notification_2.text_document.version),
3495 diagnostics: vec![
3496 lsp::Diagnostic {
3497 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3498 severity: Some(DiagnosticSeverity::ERROR),
3499 message: "undefined variable 'BB'".to_string(),
3500 source: Some("disk".to_string()),
3501 ..Default::default()
3502 },
3503 lsp::Diagnostic {
3504 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3505 severity: Some(DiagnosticSeverity::WARNING),
3506 message: "undefined variable 'A'".to_string(),
3507 source: Some("disk".to_string()),
3508 ..Default::default()
3509 },
3510 ],
3511 });
3512
3513 cx.executor().run_until_parked();
3514 buffer.update(cx, |buffer, _| {
3515 assert_eq!(
3516 buffer
3517 .snapshot()
3518 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3519 .collect::<Vec<_>>(),
3520 &[
3521 DiagnosticEntry {
3522 range: Point::new(2, 21)..Point::new(2, 22),
3523 diagnostic: Diagnostic {
3524 source: Some("disk".into()),
3525 severity: DiagnosticSeverity::WARNING,
3526 message: "undefined variable 'A'".to_string(),
3527 is_disk_based: true,
3528 group_id: 6,
3529 is_primary: true,
3530 source_kind: DiagnosticSourceKind::Pushed,
3531 ..Diagnostic::default()
3532 }
3533 },
3534 DiagnosticEntry {
3535 range: Point::new(3, 9)..Point::new(3, 14),
3536 diagnostic: Diagnostic {
3537 source: Some("disk".into()),
3538 severity: DiagnosticSeverity::ERROR,
3539 message: "undefined variable 'BB'".to_string(),
3540 is_disk_based: true,
3541 group_id: 5,
3542 is_primary: true,
3543 source_kind: DiagnosticSourceKind::Pushed,
3544 ..Diagnostic::default()
3545 },
3546 }
3547 ]
3548 );
3549 });
3550}
3551
3552#[gpui::test]
3553async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
3554 init_test(cx);
3555
3556 let text = concat!(
3557 "let one = ;\n", //
3558 "let two = \n",
3559 "let three = 3;\n",
3560 );
3561
3562 let fs = FakeFs::new(cx.executor());
3563 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3564
3565 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3566 let buffer = project
3567 .update(cx, |project, cx| {
3568 project.open_local_buffer(path!("/dir/a.rs"), cx)
3569 })
3570 .await
3571 .unwrap();
3572
3573 project.update(cx, |project, cx| {
3574 project.lsp_store().update(cx, |lsp_store, cx| {
3575 lsp_store
3576 .update_diagnostic_entries(
3577 LanguageServerId(0),
3578 PathBuf::from(path!("/dir/a.rs")),
3579 None,
3580 None,
3581 vec![
3582 DiagnosticEntry {
3583 range: Unclipped(PointUtf16::new(0, 10))
3584 ..Unclipped(PointUtf16::new(0, 10)),
3585 diagnostic: Diagnostic {
3586 severity: DiagnosticSeverity::ERROR,
3587 message: "syntax error 1".to_string(),
3588 source_kind: DiagnosticSourceKind::Pushed,
3589 ..Diagnostic::default()
3590 },
3591 },
3592 DiagnosticEntry {
3593 range: Unclipped(PointUtf16::new(1, 10))
3594 ..Unclipped(PointUtf16::new(1, 10)),
3595 diagnostic: Diagnostic {
3596 severity: DiagnosticSeverity::ERROR,
3597 message: "syntax error 2".to_string(),
3598 source_kind: DiagnosticSourceKind::Pushed,
3599 ..Diagnostic::default()
3600 },
3601 },
3602 ],
3603 cx,
3604 )
3605 .unwrap();
3606 })
3607 });
3608
3609 // An empty range is extended forward to include the following character.
3610 // At the end of a line, an empty range is extended backward to include
3611 // the preceding character.
3612 buffer.update(cx, |buffer, _| {
3613 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
3614 assert_eq!(
3615 chunks
3616 .iter()
3617 .map(|(s, d)| (s.as_str(), *d))
3618 .collect::<Vec<_>>(),
3619 &[
3620 ("let one = ", None),
3621 (";", Some(DiagnosticSeverity::ERROR)),
3622 ("\nlet two =", None),
3623 (" ", Some(DiagnosticSeverity::ERROR)),
3624 ("\nlet three = 3;\n", None)
3625 ]
3626 );
3627 });
3628}
3629
3630#[gpui::test]
3631async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3632 init_test(cx);
3633
3634 let fs = FakeFs::new(cx.executor());
3635 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3636 .await;
3637
3638 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3639 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3640
3641 lsp_store.update(cx, |lsp_store, cx| {
3642 lsp_store
3643 .update_diagnostic_entries(
3644 LanguageServerId(0),
3645 Path::new(path!("/dir/a.rs")).to_owned(),
3646 None,
3647 None,
3648 vec![DiagnosticEntry {
3649 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3650 diagnostic: Diagnostic {
3651 severity: DiagnosticSeverity::ERROR,
3652 is_primary: true,
3653 message: "syntax error a1".to_string(),
3654 source_kind: DiagnosticSourceKind::Pushed,
3655 ..Diagnostic::default()
3656 },
3657 }],
3658 cx,
3659 )
3660 .unwrap();
3661 lsp_store
3662 .update_diagnostic_entries(
3663 LanguageServerId(1),
3664 Path::new(path!("/dir/a.rs")).to_owned(),
3665 None,
3666 None,
3667 vec![DiagnosticEntry {
3668 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3669 diagnostic: Diagnostic {
3670 severity: DiagnosticSeverity::ERROR,
3671 is_primary: true,
3672 message: "syntax error b1".to_string(),
3673 source_kind: DiagnosticSourceKind::Pushed,
3674 ..Diagnostic::default()
3675 },
3676 }],
3677 cx,
3678 )
3679 .unwrap();
3680
3681 assert_eq!(
3682 lsp_store.diagnostic_summary(false, cx),
3683 DiagnosticSummary {
3684 error_count: 2,
3685 warning_count: 0,
3686 }
3687 );
3688 });
3689}
3690
// Verifies that deleting a file on disk removes that file's diagnostics from
// the project-wide summary, while diagnostics for the remaining files survive.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_worktree_entry_removal(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one", "b.rs": "two" }))
        .await;

    let project = Project::test(fs.clone(), [Path::new(path!("/dir"))], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    lsp_store.update(cx, |lsp_store, cx| {
        // Report an error in a.rs...
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "error in a".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // ...and a warning in b.rs, both from the same server.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new(path!("/dir/b.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        is_primary: true,
                        message: "warning in b".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Initially, both files contribute to the summary.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 1,
            }
        );
    });

    // Delete a.rs on disk and park until the worktree processes the FS event.
    fs.remove_file(path!("/dir/a.rs").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // The removed file's error is gone; b.rs's warning remains.
    lsp_store.update(cx, |lsp_store, cx| {
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 1,
            },
        );
    });
}
3768
// Verifies that restarting a buffer's language server clears its previously
// published diagnostics and emits a `DiagnosticsUpdated` project event.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_server_restart(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one error from the fake server before the restart.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 1)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error before restart".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    // The published error is reflected in the project summary.
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Subscribe to project events before triggering the restart so none are missed.
    let mut events = cx.events(&project);

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });
    cx.executor().run_until_parked();

    // Drain all already-queued events without blocking (`now_or_never` returns
    // None once the stream has no ready item) and look for DiagnosticsUpdated.
    let mut received_diagnostics_updated = false;
    while let Some(Some(event)) =
        futures::FutureExt::now_or_never(futures::StreamExt::next(&mut events))
    {
        if matches!(event, Event::DiagnosticsUpdated { .. }) {
            received_diagnostics_updated = true;
        }
    }
    assert!(
        received_diagnostics_updated,
        "DiagnosticsUpdated event should be emitted when a language server is stopped"
    );

    // After the restart, the old server's diagnostics have been cleared.
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
3842
// Verifies that reloading a buffer (after the file changes on disk) triggers a
// new document-diagnostics pull from a server that advertises pull diagnostics.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_buffer_reload(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Counts how many times the fake server receives a DocumentDiagnostic pull.
    let pull_count = Arc::new(atomic::AtomicUsize::new(0));
    let closure_pull_count = pull_count.clone();
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Advertise document-level pull diagnostics (no workspace diagnostics).
            capabilities: lsp::ServerCapabilities {
                diagnostic_provider: Some(lsp::DiagnosticServerCapabilities::Options(
                    lsp::DiagnosticOptions {
                        identifier: Some("test-reload".to_string()),
                        inter_file_dependencies: true,
                        workspace_diagnostics: false,
                        work_done_progress_options: Default::default(),
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            // Every pull increments the counter and returns an empty full report.
            initializer: Some(Box::new(move |fake_server| {
                let pull_count = closure_pull_count.clone();
                fake_server.set_request_handler::<lsp::request::DocumentDiagnosticRequest, _, _>(
                    move |_, _| {
                        let pull_count = pull_count.clone();
                        async move {
                            pull_count.fetch_add(1, atomic::Ordering::SeqCst);
                            Ok(lsp::DocumentDiagnosticReportResult::Report(
                                lsp::DocumentDiagnosticReport::Full(
                                    lsp::RelatedFullDocumentDiagnosticReport {
                                        related_documents: None,
                                        full_document_diagnostic_report:
                                            lsp::FullDocumentDiagnosticReport {
                                                result_id: None,
                                                items: Vec::new(),
                                            },
                                    },
                                ),
                            ))
                        }
                    },
                );
            })),
            ..FakeLspAdapter::default()
        },
    );

    let (_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Publish initial diagnostics via the fake server.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 3)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error in a".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    // The pushed diagnostic is visible in the project summary.
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Snapshot the pull counter before provoking a reload.
    let pulls_before = pull_count.load(atomic::Ordering::SeqCst);

    // Change the file on disk. The FS event triggers buffer reload,
    // which in turn triggers pull_diagnostics_for_buffer.
    fs.save(
        path!("/dir/a.rs").as_ref(),
        &"fixed content".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    let pulls_after = pull_count.load(atomic::Ordering::SeqCst);
    assert!(
        pulls_after > pulls_before,
        "Expected document diagnostic pull after buffer reload (before={pulls_before}, after={pulls_after})"
    );
}
3950
// Verifies that `edits_from_lsp` translates edits that a language server
// computed against an OLD document version: local edits made after that version
// must be accounted for, so applying the translated edits to the current buffer
// preserves the interleaved local changes.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server saw at open time; the LSP edits below
    // will be expressed against this (now-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Translate LSP edits (expressed against the old version) into buffer edits.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves the local edits made above.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
4105
// Verifies that `edits_from_lsp` minimizes a large "replace everything" style
// diff into the small set of edits that actually changed text.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchors to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large diff collapses into just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        // Applying them produces the merged-imports result.
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4216
// Verifies that `edits_from_lsp` tolerates an insertion that FOLLOWS a
// replacement at the same position (the LSP spec requires insertions to come
// first); both edits must still be applied, with the insertion landing before
// the replaced text.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replacement covering "Path".
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    // Zero-width insertion at the same start position.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The import is inserted ahead of the (unchanged) replacement text.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
4272
// Verifies that `edits_from_lsp` normalizes malformed server edits: unordered
// edits, an inverted range (end before start), and a range that extends past
// the end of the document all resolve to the same minimal, valid edit set.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extends to line 99, far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchors to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The malformed input still collapses to the same two minimal edits as
        // the well-formed equivalent.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4379
4380fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4381 buffer: &Buffer,
4382 range: Range<T>,
4383) -> Vec<(String, Option<DiagnosticSeverity>)> {
4384 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4385 for chunk in buffer.snapshot().chunks(range, true) {
4386 if chunks
4387 .last()
4388 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4389 {
4390 chunks.last_mut().unwrap().0.push_str(chunk.text);
4391 } else {
4392 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4393 }
4394 }
4395 chunks
4396}
4397
4398#[gpui::test(iterations = 10)]
4399async fn test_definition(cx: &mut gpui::TestAppContext) {
4400 init_test(cx);
4401
4402 let fs = FakeFs::new(cx.executor());
4403 fs.insert_tree(
4404 path!("/dir"),
4405 json!({
4406 "a.rs": "const fn a() { A }",
4407 "b.rs": "const y: i32 = crate::a()",
4408 }),
4409 )
4410 .await;
4411
4412 let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;
4413
4414 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4415 language_registry.add(rust_lang());
4416 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
4417
4418 let (buffer, _handle) = project
4419 .update(cx, |project, cx| {
4420 project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
4421 })
4422 .await
4423 .unwrap();
4424
4425 let fake_server = fake_servers.next().await.unwrap();
4426 cx.executor().run_until_parked();
4427
4428 fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
4429 let params = params.text_document_position_params;
4430 assert_eq!(
4431 params.text_document.uri.to_file_path().unwrap(),
4432 Path::new(path!("/dir/b.rs")),
4433 );
4434 assert_eq!(params.position, lsp::Position::new(0, 22));
4435
4436 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
4437 lsp::Location::new(
4438 lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
4439 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4440 ),
4441 )))
4442 });
4443 let mut definitions = project
4444 .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
4445 .await
4446 .unwrap()
4447 .unwrap();
4448
4449 // Assert no new language server started
4450 cx.executor().run_until_parked();
4451 assert!(fake_servers.try_next().is_err());
4452
4453 assert_eq!(definitions.len(), 1);
4454 let definition = definitions.pop().unwrap();
4455 cx.update(|cx| {
4456 let target_buffer = definition.target.buffer.read(cx);
4457 assert_eq!(
4458 target_buffer
4459 .file()
4460 .unwrap()
4461 .as_local()
4462 .unwrap()
4463 .abs_path(cx),
4464 Path::new(path!("/dir/a.rs")),
4465 );
4466 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
4467 assert_eq!(
4468 list_worktrees(&project, cx),
4469 [
4470 (path!("/dir/a.rs").as_ref(), false),
4471 (path!("/dir/b.rs").as_ref(), true)
4472 ],
4473 );
4474
4475 drop(definition);
4476 });
4477 cx.update(|cx| {
4478 assert_eq!(
4479 list_worktrees(&project, cx),
4480 [(path!("/dir/b.rs").as_ref(), true)]
4481 );
4482 });
4483
4484 fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
4485 project
4486 .read(cx)
4487 .worktrees(cx)
4488 .map(|worktree| {
4489 let worktree = worktree.read(cx);
4490 (
4491 worktree.as_local().unwrap().abs_path().as_ref(),
4492 worktree.is_visible(),
4493 )
4494 })
4495 .collect::<Vec<_>>()
4496 }
4497}
4498
4499#[gpui::test]
4500async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
4501 init_test(cx);
4502
4503 let fs = FakeFs::new(cx.executor());
4504 fs.insert_tree(
4505 path!("/dir"),
4506 json!({
4507 "a.ts": "",
4508 }),
4509 )
4510 .await;
4511
4512 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4513
4514 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4515 language_registry.add(typescript_lang());
4516 let mut fake_language_servers = language_registry.register_fake_lsp(
4517 "TypeScript",
4518 FakeLspAdapter {
4519 capabilities: lsp::ServerCapabilities {
4520 completion_provider: Some(lsp::CompletionOptions {
4521 trigger_characters: Some(vec![".".to_string()]),
4522 ..Default::default()
4523 }),
4524 ..Default::default()
4525 },
4526 ..Default::default()
4527 },
4528 );
4529
4530 let (buffer, _handle) = project
4531 .update(cx, |p, cx| {
4532 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
4533 })
4534 .await
4535 .unwrap();
4536
4537 let fake_server = fake_language_servers.next().await.unwrap();
4538 cx.executor().run_until_parked();
4539
4540 // When text_edit exists, it takes precedence over insert_text and label
4541 let text = "let a = obj.fqn";
4542 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
4543 let completions = project.update(cx, |project, cx| {
4544 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
4545 });
4546
4547 fake_server
4548 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
4549 Ok(Some(lsp::CompletionResponse::Array(vec![
4550 lsp::CompletionItem {
4551 label: "labelText".into(),
4552 insert_text: Some("insertText".into()),
4553 text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
4554 range: lsp::Range::new(
4555 lsp::Position::new(0, text.len() as u32 - 3),
4556 lsp::Position::new(0, text.len() as u32),
4557 ),
4558 new_text: "textEditText".into(),
4559 })),
4560 ..Default::default()
4561 },
4562 ])))
4563 })
4564 .next()
4565 .await;
4566
4567 let completions = completions
4568 .await
4569 .unwrap()
4570 .into_iter()
4571 .flat_map(|response| response.completions)
4572 .collect::<Vec<_>>();
4573 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
4574
4575 assert_eq!(completions.len(), 1);
4576 assert_eq!(completions[0].new_text, "textEditText");
4577 assert_eq!(
4578 completions[0].replace_range.to_offset(&snapshot),
4579 text.len() - 3..text.len()
4580 );
4581}
4582
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A fake filesystem containing a single, initially-empty TypeScript file.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript server advertising completion support, then
    // open the buffer so the server attaches to it.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Start the completion request first; the handler installed below is
        // awaited (`.next().await`), so request and response rendezvous.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    // The list-level default edit range covers the trailing
                    // "fqn" (the last 3 characters of the buffer text).
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` + default edit range should win over the label.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit range present, the label is used as the new
        // text and `insert_text` ("irrelevant") is ignored.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4720
4721#[gpui::test]
4722async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
4723 init_test(cx);
4724
4725 let fs = FakeFs::new(cx.executor());
4726 fs.insert_tree(
4727 path!("/dir"),
4728 json!({
4729 "a.ts": "",
4730 }),
4731 )
4732 .await;
4733
4734 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4735
4736 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4737 language_registry.add(typescript_lang());
4738 let mut fake_language_servers = language_registry.register_fake_lsp(
4739 "TypeScript",
4740 FakeLspAdapter {
4741 capabilities: lsp::ServerCapabilities {
4742 completion_provider: Some(lsp::CompletionOptions {
4743 trigger_characters: Some(vec![":".to_string()]),
4744 ..Default::default()
4745 }),
4746 ..Default::default()
4747 },
4748 ..Default::default()
4749 },
4750 );
4751
4752 let (buffer, _handle) = project
4753 .update(cx, |p, cx| {
4754 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
4755 })
4756 .await
4757 .unwrap();
4758
4759 let fake_server = fake_language_servers.next().await.unwrap();
4760 cx.executor().run_until_parked();
4761
4762 // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
4763 let text = "let a = b.fqn";
4764 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
4765 let completions = project.update(cx, |project, cx| {
4766 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
4767 });
4768
4769 fake_server
4770 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
4771 Ok(Some(lsp::CompletionResponse::Array(vec![
4772 lsp::CompletionItem {
4773 label: "fullyQualifiedName?".into(),
4774 insert_text: Some("fullyQualifiedName".into()),
4775 ..Default::default()
4776 },
4777 ])))
4778 })
4779 .next()
4780 .await;
4781 let completions = completions
4782 .await
4783 .unwrap()
4784 .into_iter()
4785 .flat_map(|response| response.completions)
4786 .collect::<Vec<_>>();
4787 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
4788 assert_eq!(completions.len(), 1);
4789 assert_eq!(completions[0].new_text, "fullyQualifiedName");
4790 assert_eq!(
4791 completions[0].replace_range.to_offset(&snapshot),
4792 text.len() - 3..text.len()
4793 );
4794
4795 // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
4796 let text = "let a = \"atoms/cmp\"";
4797 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
4798 let completions = project.update(cx, |project, cx| {
4799 project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
4800 });
4801
4802 fake_server
4803 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
4804 Ok(Some(lsp::CompletionResponse::Array(vec![
4805 lsp::CompletionItem {
4806 label: "component".into(),
4807 ..Default::default()
4808 },
4809 ])))
4810 })
4811 .next()
4812 .await;
4813 let completions = completions
4814 .await
4815 .unwrap()
4816 .into_iter()
4817 .flat_map(|response| response.completions)
4818 .collect::<Vec<_>>();
4819 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
4820 assert_eq!(completions.len(), 1);
4821 assert_eq!(completions[0].new_text, "component");
4822 assert_eq!(
4823 completions[0].replace_range.to_offset(&snapshot),
4824 text.len() - 4..text.len() - 1
4825 );
4826}
4827
4828#[gpui::test]
4829async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
4830 init_test(cx);
4831
4832 let fs = FakeFs::new(cx.executor());
4833 fs.insert_tree(
4834 path!("/dir"),
4835 json!({
4836 "a.ts": "",
4837 }),
4838 )
4839 .await;
4840
4841 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4842
4843 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4844 language_registry.add(typescript_lang());
4845 let mut fake_language_servers = language_registry.register_fake_lsp(
4846 "TypeScript",
4847 FakeLspAdapter {
4848 capabilities: lsp::ServerCapabilities {
4849 completion_provider: Some(lsp::CompletionOptions {
4850 trigger_characters: Some(vec![":".to_string()]),
4851 ..Default::default()
4852 }),
4853 ..Default::default()
4854 },
4855 ..Default::default()
4856 },
4857 );
4858
4859 let (buffer, _handle) = project
4860 .update(cx, |p, cx| {
4861 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
4862 })
4863 .await
4864 .unwrap();
4865
4866 let fake_server = fake_language_servers.next().await.unwrap();
4867 cx.executor().run_until_parked();
4868
4869 let text = "let a = b.fqn";
4870 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
4871 let completions = project.update(cx, |project, cx| {
4872 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
4873 });
4874
4875 fake_server
4876 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
4877 Ok(Some(lsp::CompletionResponse::Array(vec![
4878 lsp::CompletionItem {
4879 label: "fullyQualifiedName?".into(),
4880 insert_text: Some("fully\rQualified\r\nName".into()),
4881 ..Default::default()
4882 },
4883 ])))
4884 })
4885 .next()
4886 .await;
4887 let completions = completions
4888 .await
4889 .unwrap()
4890 .into_iter()
4891 .flat_map(|response| response.completions)
4892 .collect::<Vec<_>>();
4893 assert_eq!(completions.len(), 1);
4894 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
4895}
4896
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // End-to-end exercise of a code action whose effect arrives via a server
    // command rather than inline edits: resolve -> execute command ->
    // server-initiated `workspace/applyEdit` -> project transaction.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Fake server advertising code actions with lazy resolution, plus a
    // single executable command.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action ("The code action", which carries `data`).
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // The edit inserts "X" at the very start of `a.ts`.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The applied edit is undoable like a normal buffer edit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
5040
5041#[gpui::test]
5042async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
5043 init_test(cx);
5044 let fs = FakeFs::new(cx.background_executor.clone());
5045 let expected_contents = "content";
5046 fs.as_fake()
5047 .insert_tree(
5048 "/root",
5049 json!({
5050 "test.txt": expected_contents
5051 }),
5052 )
5053 .await;
5054
5055 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
5056
5057 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
5058 let worktree = project.worktrees(cx).next().unwrap();
5059 let entry_id = worktree
5060 .read(cx)
5061 .entry_for_path(rel_path("test.txt"))
5062 .unwrap()
5063 .id;
5064 (worktree, entry_id)
5065 });
5066 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5067 let _result = project
5068 .update(cx, |project, cx| {
5069 project.rename_entry(
5070 entry_id,
5071 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
5072 cx,
5073 )
5074 })
5075 .await
5076 .unwrap();
5077 worktree.read_with(cx, |worktree, _| {
5078 assert!(
5079 worktree.entry_for_path(rel_path("test.txt")).is_none(),
5080 "Old file should have been removed"
5081 );
5082 assert!(
5083 worktree
5084 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5085 .is_some(),
5086 "Whole directory hierarchy and the new file should have been created"
5087 );
5088 });
5089 assert_eq!(
5090 worktree
5091 .update(cx, |worktree, cx| {
5092 worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
5093 })
5094 .await
5095 .unwrap()
5096 .text,
5097 expected_contents,
5098 "Moved file's contents should be preserved"
5099 );
5100
5101 let entry_id = worktree.read_with(cx, |worktree, _| {
5102 worktree
5103 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5104 .unwrap()
5105 .id
5106 });
5107
5108 let _result = project
5109 .update(cx, |project, cx| {
5110 project.rename_entry(
5111 entry_id,
5112 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
5113 cx,
5114 )
5115 })
5116 .await
5117 .unwrap();
5118 worktree.read_with(cx, |worktree, _| {
5119 assert!(
5120 worktree.entry_for_path(rel_path("test.txt")).is_none(),
5121 "First file should not reappear"
5122 );
5123 assert!(
5124 worktree
5125 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5126 .is_none(),
5127 "Old file should have been removed"
5128 );
5129 assert!(
5130 worktree
5131 .entry_for_path(rel_path("dir1/dir2/test.txt"))
5132 .is_some(),
5133 "No error should have occurred after moving into existing directory"
5134 );
5135 });
5136 assert_eq!(
5137 worktree
5138 .update(cx, |worktree, cx| {
5139 worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
5140 })
5141 .await
5142 .unwrap()
5143 .text,
5144 expected_contents,
5145 "Moved file's contents should be preserved"
5146 );
5147}
5148
5149#[gpui::test(iterations = 10)]
5150async fn test_save_file(cx: &mut gpui::TestAppContext) {
5151 init_test(cx);
5152
5153 let fs = FakeFs::new(cx.executor());
5154 fs.insert_tree(
5155 path!("/dir"),
5156 json!({
5157 "file1": "the old contents",
5158 }),
5159 )
5160 .await;
5161
5162 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5163 let buffer = project
5164 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5165 .await
5166 .unwrap();
5167 buffer.update(cx, |buffer, cx| {
5168 assert_eq!(buffer.text(), "the old contents");
5169 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5170 });
5171
5172 project
5173 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5174 .await
5175 .unwrap();
5176
5177 let new_text = fs
5178 .load(Path::new(path!("/dir/file1")))
5179 .await
5180 .unwrap()
5181 .replace("\r\n", "\n");
5182 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5183}
5184
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Saving an untitled buffer under a name with a recognized extension
    // should start the matching language server and open the file in it.
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust language server; its adapter matches Rust files.
    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer has no file, so no language server applies to it yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as `file.rs` inside the project's worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now backed by `file.rs`, so the Rust server applies to it.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
5264
5265#[gpui::test(iterations = 30)]
5266async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
5267 init_test(cx);
5268
5269 let fs = FakeFs::new(cx.executor());
5270 fs.insert_tree(
5271 path!("/dir"),
5272 json!({
5273 "file1": "the original contents",
5274 }),
5275 )
5276 .await;
5277
5278 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5279 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5280 let buffer = project
5281 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5282 .await
5283 .unwrap();
5284
5285 // Change the buffer's file on disk, and then wait for the file change
5286 // to be detected by the worktree, so that the buffer starts reloading.
5287 fs.save(
5288 path!("/dir/file1").as_ref(),
5289 &"the first contents".into(),
5290 Default::default(),
5291 )
5292 .await
5293 .unwrap();
5294 worktree.next_event(cx).await;
5295
5296 // Change the buffer's file again. Depending on the random seed, the
5297 // previous file change may still be in progress.
5298 fs.save(
5299 path!("/dir/file1").as_ref(),
5300 &"the second contents".into(),
5301 Default::default(),
5302 )
5303 .await
5304 .unwrap();
5305 worktree.next_event(cx).await;
5306
5307 cx.executor().run_until_parked();
5308 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5309 buffer.read_with(cx, |buffer, _| {
5310 assert_eq!(buffer.text(), on_disk_text);
5311 assert!(!buffer.is_dirty(), "buffer should not be dirty");
5312 assert!(!buffer.has_conflict(), "buffer should not be dirty");
5313 });
5314}
5315
5316#[gpui::test(iterations = 30)]
5317async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
5318 init_test(cx);
5319
5320 let fs = FakeFs::new(cx.executor());
5321 fs.insert_tree(
5322 path!("/dir"),
5323 json!({
5324 "file1": "the original contents",
5325 }),
5326 )
5327 .await;
5328
5329 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5330 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5331 let buffer = project
5332 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5333 .await
5334 .unwrap();
5335
5336 // Change the buffer's file on disk, and then wait for the file change
5337 // to be detected by the worktree, so that the buffer starts reloading.
5338 fs.save(
5339 path!("/dir/file1").as_ref(),
5340 &"the first contents".into(),
5341 Default::default(),
5342 )
5343 .await
5344 .unwrap();
5345 worktree.next_event(cx).await;
5346
5347 cx.executor()
5348 .spawn(cx.executor().simulate_random_delay())
5349 .await;
5350
5351 // Perform a noop edit, causing the buffer's version to increase.
5352 buffer.update(cx, |buffer, cx| {
5353 buffer.edit([(0..0, " ")], None, cx);
5354 buffer.undo(cx);
5355 });
5356
5357 cx.executor().run_until_parked();
5358 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5359 buffer.read_with(cx, |buffer, _| {
5360 let buffer_text = buffer.text();
5361 if buffer_text == on_disk_text {
5362 assert!(
5363 !buffer.is_dirty() && !buffer.has_conflict(),
5364 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5365 );
5366 }
5367 // If the file change occurred while the buffer was processing the first
5368 // change, the buffer will be in a conflicting state.
5369 else {
5370 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5371 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5372 }
5373 });
5374}
5375
5376#[gpui::test]
5377async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5378 init_test(cx);
5379
5380 let fs = FakeFs::new(cx.executor());
5381 fs.insert_tree(
5382 path!("/dir"),
5383 json!({
5384 "file1": "the old contents",
5385 }),
5386 )
5387 .await;
5388
5389 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5390 let buffer = project
5391 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5392 .await
5393 .unwrap();
5394 buffer.update(cx, |buffer, cx| {
5395 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5396 });
5397
5398 project
5399 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5400 .await
5401 .unwrap();
5402
5403 let new_text = fs
5404 .load(Path::new(path!("/dir/file1")))
5405 .await
5406 .unwrap()
5407 .replace("\r\n", "\n");
5408 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5409}
5410
5411#[gpui::test]
5412async fn test_save_as(cx: &mut gpui::TestAppContext) {
5413 init_test(cx);
5414
5415 let fs = FakeFs::new(cx.executor());
5416 fs.insert_tree("/dir", json!({})).await;
5417
5418 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5419
5420 let languages = project.update(cx, |project, _| project.languages().clone());
5421 languages.add(rust_lang());
5422
5423 let buffer = project.update(cx, |project, cx| {
5424 project.create_local_buffer("", None, false, cx)
5425 });
5426 buffer.update(cx, |buffer, cx| {
5427 buffer.edit([(0..0, "abc")], None, cx);
5428 assert!(buffer.is_dirty());
5429 assert!(!buffer.has_conflict());
5430 assert_eq!(buffer.language().unwrap().name(), "Plain Text");
5431 });
5432 project
5433 .update(cx, |project, cx| {
5434 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5435 let path = ProjectPath {
5436 worktree_id,
5437 path: rel_path("file1.rs").into(),
5438 };
5439 project.save_buffer_as(buffer.clone(), path, cx)
5440 })
5441 .await
5442 .unwrap();
5443 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
5444
5445 cx.executor().run_until_parked();
5446 buffer.update(cx, |buffer, cx| {
5447 assert_eq!(
5448 buffer.file().unwrap().full_path(cx),
5449 Path::new("dir/file1.rs")
5450 );
5451 assert!(!buffer.is_dirty());
5452 assert!(!buffer.has_conflict());
5453 assert_eq!(buffer.language().unwrap().name(), "Rust");
5454 });
5455
5456 let opened_buffer = project
5457 .update(cx, |project, cx| {
5458 project.open_local_buffer("/dir/file1.rs", cx)
5459 })
5460 .await
5461 .unwrap();
5462 assert_eq!(opened_buffer, buffer);
5463}
5464
5465#[gpui::test]
5466async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
5467 init_test(cx);
5468
5469 let fs = FakeFs::new(cx.executor());
5470 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5471
5472 fs.insert_tree(
5473 path!("/dir"),
5474 json!({
5475 "data_a.txt": "data about a"
5476 }),
5477 )
5478 .await;
5479
5480 let buffer = project
5481 .update(cx, |project, cx| {
5482 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5483 })
5484 .await
5485 .unwrap();
5486
5487 buffer.update(cx, |buffer, cx| {
5488 buffer.edit([(11..12, "b")], None, cx);
5489 });
5490
5491 // Save buffer's contents as a new file and confirm that the buffer's now
5492 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
5493 // file associated with the buffer has now been updated to `data_b.txt`
5494 project
5495 .update(cx, |project, cx| {
5496 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5497 let new_path = ProjectPath {
5498 worktree_id,
5499 path: rel_path("data_b.txt").into(),
5500 };
5501
5502 project.save_buffer_as(buffer.clone(), new_path, cx)
5503 })
5504 .await
5505 .unwrap();
5506
5507 buffer.update(cx, |buffer, cx| {
5508 assert_eq!(
5509 buffer.file().unwrap().full_path(cx),
5510 Path::new("dir/data_b.txt")
5511 )
5512 });
5513
5514 // Open the original `data_a.txt` file, confirming that its contents are
5515 // unchanged and the resulting buffer's associated file is `data_a.txt`.
5516 let original_buffer = project
5517 .update(cx, |project, cx| {
5518 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
5519 })
5520 .await
5521 .unwrap();
5522
5523 original_buffer.update(cx, |buffer, cx| {
5524 assert_eq!(buffer.text(), "data about a");
5525 assert_eq!(
5526 buffer.file().unwrap().full_path(cx),
5527 Path::new("dir/data_a.txt")
5528 )
5529 });
5530}
5531
// Exercises worktree rescanning after on-disk renames and deletions: entry
// ids and open buffers must track files to their new paths, and a remote
// worktree replica fed the observed update stream must converge to the same
// set of paths as the local worktree.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real-filesystem test: allow the executor to park while waiting on
    // background fs events.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for an absolute path under the temp tree.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: resolve the worktree entry id for a relative path, panicking
    // if the entry does not exist.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so it can be replayed
    // into the remote replica further down.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            // Returning `true` keeps the observation alive.
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    // Nothing has been edited yet, so all buffers start clean.
    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including the directory rename that
    // carried file3/file4 from b/c to d.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Buffers for renamed files follow their files to the new paths;
        // the buffer for the deleted file5 keeps its old path.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5699
5700#[cfg(target_os = "linux")]
5701#[gpui::test(retries = 5)]
5702async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
5703 init_test(cx);
5704 cx.executor().allow_parking();
5705
5706 let dir = TempTree::new(json!({}));
5707 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
5708 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5709
5710 tree.flush_fs_events(cx).await;
5711
5712 let repro_dir = dir.path().join("repro");
5713 std::fs::create_dir(&repro_dir).unwrap();
5714 tree.flush_fs_events(cx).await;
5715
5716 cx.update(|cx| {
5717 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5718 });
5719
5720 std::fs::remove_dir_all(&repro_dir).unwrap();
5721 tree.flush_fs_events(cx).await;
5722
5723 cx.update(|cx| {
5724 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
5725 });
5726
5727 std::fs::create_dir(&repro_dir).unwrap();
5728 tree.flush_fs_events(cx).await;
5729
5730 cx.update(|cx| {
5731 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5732 });
5733
5734 std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
5735 tree.flush_fs_events(cx).await;
5736
5737 cx.update(|cx| {
5738 assert!(
5739 tree.read(cx)
5740 .entry_for_path(rel_path("repro/repro-marker"))
5741 .is_some()
5742 );
5743 });
5744}
5745
5746#[gpui::test(iterations = 10)]
5747async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
5748 init_test(cx);
5749
5750 let fs = FakeFs::new(cx.executor());
5751 fs.insert_tree(
5752 path!("/dir"),
5753 json!({
5754 "a": {
5755 "file1": "",
5756 }
5757 }),
5758 )
5759 .await;
5760
5761 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
5762 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5763 let tree_id = tree.update(cx, |tree, _| tree.id());
5764
5765 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5766 project.update(cx, |project, cx| {
5767 let tree = project.worktrees(cx).next().unwrap();
5768 tree.read(cx)
5769 .entry_for_path(rel_path(path))
5770 .unwrap_or_else(|| panic!("no entry for path {}", path))
5771 .id
5772 })
5773 };
5774
5775 let dir_id = id_for_path("a", cx);
5776 let file_id = id_for_path("a/file1", cx);
5777 let buffer = project
5778 .update(cx, |p, cx| {
5779 p.open_buffer((tree_id, rel_path("a/file1")), cx)
5780 })
5781 .await
5782 .unwrap();
5783 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5784
5785 project
5786 .update(cx, |project, cx| {
5787 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
5788 })
5789 .unwrap()
5790 .await
5791 .into_included()
5792 .unwrap();
5793 cx.executor().run_until_parked();
5794
5795 assert_eq!(id_for_path("b", cx), dir_id);
5796 assert_eq!(id_for_path("b/file1", cx), file_id);
5797 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5798}
5799
5800#[gpui::test]
5801async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5802 init_test(cx);
5803
5804 let fs = FakeFs::new(cx.executor());
5805 fs.insert_tree(
5806 "/dir",
5807 json!({
5808 "a.txt": "a-contents",
5809 "b.txt": "b-contents",
5810 }),
5811 )
5812 .await;
5813
5814 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5815
5816 // Spawn multiple tasks to open paths, repeating some paths.
5817 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5818 (
5819 p.open_local_buffer("/dir/a.txt", cx),
5820 p.open_local_buffer("/dir/b.txt", cx),
5821 p.open_local_buffer("/dir/a.txt", cx),
5822 )
5823 });
5824
5825 let buffer_a_1 = buffer_a_1.await.unwrap();
5826 let buffer_a_2 = buffer_a_2.await.unwrap();
5827 let buffer_b = buffer_b.await.unwrap();
5828 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5829 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5830
5831 // There is only one buffer per path.
5832 let buffer_a_id = buffer_a_1.entity_id();
5833 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5834
5835 // Open the same path again while it is still open.
5836 drop(buffer_a_1);
5837 let buffer_a_3 = project
5838 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5839 .await
5840 .unwrap();
5841
5842 // There's still only one buffer per path.
5843 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5844}
5845
// Verifies the buffer dirty-state machine and the exact sequence of events
// emitted: editing dirties a buffer, saving cleans it, restoring the saved
// text cleans it again, and file deletion interacts with dirtiness without
// emitting spurious events.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collects every non-Operation event the buffer emits, so the test can
    // assert on exact event sequences below.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                // Operation events are noise for this test; ignore them.
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited { is_local: true },
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by reporting the current version and on-disk mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first edit after becoming dirty produces DirtyChanged;
        // the second edit emits Edited alone.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited { is_local: true },
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited { is_local: true },
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited { is_local: true },
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited { is_local: true },
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited { is_local: true },
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    // Only the file-handle change is reported; the buffer stays dirty.
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
6027
// Verifies that a buffer which was dirty while its file changed on disk
// reloads from disk as soon as an undo returns it to a clean state, rather
// than silently keeping stale content.
#[gpui::test]
async fn test_dirty_buffer_reloads_after_undo(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file.txt": "version 1",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.text(), "version 1");
        assert!(!buffer.is_dirty());
    });

    // User makes an edit, making the buffer dirty.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "user edit: ")], None, cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.is_dirty());
        assert_eq!(buffer.text(), "user edit: version 1");
    });

    // External tool writes new content while buffer is dirty.
    // file_updated() updates the File but suppresses ReloadNeeded.
    fs.save(
        path!("/dir/file.txt").as_ref(),
        &"version 2 from external tool".into(),
        Default::default(),
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    // The dirty buffer is NOT reloaded; it is flagged as conflicting.
    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.has_conflict());
        assert_eq!(buffer.text(), "user edit: version 1");
    });

    // User undoes their edit. Buffer becomes clean, but disk has different
    // content. did_edit() detects the dirty->clean transition and checks if
    // disk changed while dirty. Since mtime differs from saved_mtime, it
    // emits ReloadNeeded.
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });
    cx.executor().run_until_parked();

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.text(),
            "version 2 from external tool",
            "buffer should reload from disk after undo makes it clean"
        );
        assert!(!buffer.is_dirty());
    });
}
6096
// Verifies how a buffer reacts to on-disk changes: a clean buffer reloads
// via a diff that preserves anchor positions, while a dirty buffer keeps
// its content and is only marked as conflicting.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The `ˇ` markers define offsets at which anchors are placed, so we can
    // check they survive the reload-by-diff below.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diffed edits rather than being reset.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
6179
6180#[gpui::test]
6181async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
6182 init_test(cx);
6183
6184 let fs = FakeFs::new(cx.executor());
6185 fs.insert_tree(
6186 path!("/dir"),
6187 json!({
6188 "file1": "a\nb\nc\n",
6189 "file2": "one\r\ntwo\r\nthree\r\n",
6190 }),
6191 )
6192 .await;
6193
6194 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6195 let buffer1 = project
6196 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
6197 .await
6198 .unwrap();
6199 let buffer2 = project
6200 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
6201 .await
6202 .unwrap();
6203
6204 buffer1.update(cx, |buffer, _| {
6205 assert_eq!(buffer.text(), "a\nb\nc\n");
6206 assert_eq!(buffer.line_ending(), LineEnding::Unix);
6207 });
6208 buffer2.update(cx, |buffer, _| {
6209 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
6210 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6211 });
6212
6213 // Change a file's line endings on disk from unix to windows. The buffer's
6214 // state updates correctly.
6215 fs.save(
6216 path!("/dir/file1").as_ref(),
6217 &"aaa\nb\nc\n".into(),
6218 LineEnding::Windows,
6219 )
6220 .await
6221 .unwrap();
6222 cx.executor().run_until_parked();
6223 buffer1.update(cx, |buffer, _| {
6224 assert_eq!(buffer.text(), "aaa\nb\nc\n");
6225 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6226 });
6227
6228 // Save a file with windows line endings. The file is written correctly.
6229 buffer2.update(cx, |buffer, cx| {
6230 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
6231 });
6232 project
6233 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
6234 .await
6235 .unwrap();
6236 assert_eq!(
6237 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
6238 "one\r\ntwo\r\nthree\r\nfour\r\n",
6239 );
6240}
6241
// Verifies that pushed LSP diagnostics with `related_information` are
// grouped: each primary diagnostic and its hints share a `group_id`, hints
// are marked non-primary, and `diagnostic_group` returns a whole group in
// range order.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two diagnostic "groups": "error 1" with one hint, and "error 2" with
    // two hints. Primary diagnostics reference their hints (and vice versa)
    // via `related_information` locations.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, in buffer order: group 1 ("error 1" + its hint)
    // precedes group 0's hints and primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: "error 2" and both of its hints, in range order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6501
// Verifies the LSP file-rename protocol flow: when an entry is renamed,
// the project sends `workspace/willRenameFiles` (and applies the returned
// workspace edit) before the rename, then `workspace/didRenameFiles` after,
// honoring the server's registered file-operation filters.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server registers interest in *.rs files and all folders, for both
    // willRename and didRename.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename of one.rs -> three.rs; it won't complete until
    // the willRenameFiles request below is answered.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will return from willRenameFiles: an update to
    // two/two.rs referencing the renamed module.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit actually handed back by the willRenameFiles handler,
    // so we can assert it reached the server round-trip at the end.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles
    // with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6638
// Exercises the LSP rename flow end to end against a fake Rust language
// server: `prepare_rename` resolves the symbol range under the cursor, then
// `perform_rename` applies the server's multi-file `WorkspaceEdit` across the
// affected buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Register a fake Rust server that advertises rename support with
    // `prepareProvider`, so prepare_rename is routed to the server.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Issue prepare_rename at offset 7 (inside "ONE") first; the handler is
    // installed afterwards, and `.next().await` waits until the fake server
    // has actually served that request.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            // Report columns 6..9 ("ONE") as the renameable range.
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The anchor range reported by the server maps back to buffer offsets 6..9.
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Now perform the rename ONE -> THREE; the fake server responds with a
    // workspace edit touching both one.rs and two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both buffers; check each one's final text.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6779
6780#[gpui::test]
6781async fn test_search(cx: &mut gpui::TestAppContext) {
6782 init_test(cx);
6783
6784 let fs = FakeFs::new(cx.executor());
6785 fs.insert_tree(
6786 path!("/dir"),
6787 json!({
6788 "one.rs": "const ONE: usize = 1;",
6789 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6790 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6791 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6792 }),
6793 )
6794 .await;
6795 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6796 assert_eq!(
6797 search(
6798 &project,
6799 SearchQuery::text(
6800 "TWO",
6801 false,
6802 true,
6803 false,
6804 Default::default(),
6805 Default::default(),
6806 false,
6807 None
6808 )
6809 .unwrap(),
6810 cx
6811 )
6812 .await
6813 .unwrap(),
6814 HashMap::from_iter([
6815 (path!("dir/two.rs").to_string(), vec![6..9]),
6816 (path!("dir/three.rs").to_string(), vec![37..40])
6817 ])
6818 );
6819
6820 let buffer_4 = project
6821 .update(cx, |project, cx| {
6822 project.open_local_buffer(path!("/dir/four.rs"), cx)
6823 })
6824 .await
6825 .unwrap();
6826 buffer_4.update(cx, |buffer, cx| {
6827 let text = "two::TWO";
6828 buffer.edit([(20..28, text), (31..43, text)], None, cx);
6829 });
6830
6831 assert_eq!(
6832 search(
6833 &project,
6834 SearchQuery::text(
6835 "TWO",
6836 false,
6837 true,
6838 false,
6839 Default::default(),
6840 Default::default(),
6841 false,
6842 None,
6843 )
6844 .unwrap(),
6845 cx
6846 )
6847 .await
6848 .unwrap(),
6849 HashMap::from_iter([
6850 (path!("dir/two.rs").to_string(), vec![6..9]),
6851 (path!("dir/three.rs").to_string(), vec![37..40]),
6852 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
6853 ])
6854 );
6855}
6856
6857#[gpui::test]
6858async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
6859 init_test(cx);
6860
6861 let search_query = "file";
6862
6863 let fs = FakeFs::new(cx.executor());
6864 fs.insert_tree(
6865 path!("/dir"),
6866 json!({
6867 "one.rs": r#"// Rust file one"#,
6868 "one.ts": r#"// TypeScript file one"#,
6869 "two.rs": r#"// Rust file two"#,
6870 "two.ts": r#"// TypeScript file two"#,
6871 }),
6872 )
6873 .await;
6874 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6875
6876 assert!(
6877 search(
6878 &project,
6879 SearchQuery::text(
6880 search_query,
6881 false,
6882 true,
6883 false,
6884 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6885 Default::default(),
6886 false,
6887 None
6888 )
6889 .unwrap(),
6890 cx
6891 )
6892 .await
6893 .unwrap()
6894 .is_empty(),
6895 "If no inclusions match, no files should be returned"
6896 );
6897
6898 assert_eq!(
6899 search(
6900 &project,
6901 SearchQuery::text(
6902 search_query,
6903 false,
6904 true,
6905 false,
6906 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6907 Default::default(),
6908 false,
6909 None
6910 )
6911 .unwrap(),
6912 cx
6913 )
6914 .await
6915 .unwrap(),
6916 HashMap::from_iter([
6917 (path!("dir/one.rs").to_string(), vec![8..12]),
6918 (path!("dir/two.rs").to_string(), vec![8..12]),
6919 ]),
6920 "Rust only search should give only Rust files"
6921 );
6922
6923 assert_eq!(
6924 search(
6925 &project,
6926 SearchQuery::text(
6927 search_query,
6928 false,
6929 true,
6930 false,
6931 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6932 .unwrap(),
6933 Default::default(),
6934 false,
6935 None,
6936 )
6937 .unwrap(),
6938 cx
6939 )
6940 .await
6941 .unwrap(),
6942 HashMap::from_iter([
6943 (path!("dir/one.ts").to_string(), vec![14..18]),
6944 (path!("dir/two.ts").to_string(), vec![14..18]),
6945 ]),
6946 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
6947 );
6948
6949 assert_eq!(
6950 search(
6951 &project,
6952 SearchQuery::text(
6953 search_query,
6954 false,
6955 true,
6956 false,
6957 PathMatcher::new(
6958 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6959 PathStyle::local()
6960 )
6961 .unwrap(),
6962 Default::default(),
6963 false,
6964 None,
6965 )
6966 .unwrap(),
6967 cx
6968 )
6969 .await
6970 .unwrap(),
6971 HashMap::from_iter([
6972 (path!("dir/two.ts").to_string(), vec![14..18]),
6973 (path!("dir/one.rs").to_string(), vec![8..12]),
6974 (path!("dir/one.ts").to_string(), vec![14..18]),
6975 (path!("dir/two.rs").to_string(), vec![8..12]),
6976 ]),
6977 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
6978 );
6979}
6980
6981#[gpui::test]
6982async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
6983 init_test(cx);
6984
6985 let search_query = "file";
6986
6987 let fs = FakeFs::new(cx.executor());
6988 fs.insert_tree(
6989 path!("/dir"),
6990 json!({
6991 "one.rs": r#"// Rust file one"#,
6992 "one.ts": r#"// TypeScript file one"#,
6993 "two.rs": r#"// Rust file two"#,
6994 "two.ts": r#"// TypeScript file two"#,
6995 }),
6996 )
6997 .await;
6998 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6999
7000 assert_eq!(
7001 search(
7002 &project,
7003 SearchQuery::text(
7004 search_query,
7005 false,
7006 true,
7007 false,
7008 Default::default(),
7009 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7010 false,
7011 None,
7012 )
7013 .unwrap(),
7014 cx
7015 )
7016 .await
7017 .unwrap(),
7018 HashMap::from_iter([
7019 (path!("dir/one.rs").to_string(), vec![8..12]),
7020 (path!("dir/one.ts").to_string(), vec![14..18]),
7021 (path!("dir/two.rs").to_string(), vec![8..12]),
7022 (path!("dir/two.ts").to_string(), vec![14..18]),
7023 ]),
7024 "If no exclusions match, all files should be returned"
7025 );
7026
7027 assert_eq!(
7028 search(
7029 &project,
7030 SearchQuery::text(
7031 search_query,
7032 false,
7033 true,
7034 false,
7035 Default::default(),
7036 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
7037 false,
7038 None,
7039 )
7040 .unwrap(),
7041 cx
7042 )
7043 .await
7044 .unwrap(),
7045 HashMap::from_iter([
7046 (path!("dir/one.ts").to_string(), vec![14..18]),
7047 (path!("dir/two.ts").to_string(), vec![14..18]),
7048 ]),
7049 "Rust exclusion search should give only TypeScript files"
7050 );
7051
7052 assert_eq!(
7053 search(
7054 &project,
7055 SearchQuery::text(
7056 search_query,
7057 false,
7058 true,
7059 false,
7060 Default::default(),
7061 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7062 .unwrap(),
7063 false,
7064 None,
7065 )
7066 .unwrap(),
7067 cx
7068 )
7069 .await
7070 .unwrap(),
7071 HashMap::from_iter([
7072 (path!("dir/one.rs").to_string(), vec![8..12]),
7073 (path!("dir/two.rs").to_string(), vec![8..12]),
7074 ]),
7075 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7076 );
7077
7078 assert!(
7079 search(
7080 &project,
7081 SearchQuery::text(
7082 search_query,
7083 false,
7084 true,
7085 false,
7086 Default::default(),
7087 PathMatcher::new(
7088 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7089 PathStyle::local(),
7090 )
7091 .unwrap(),
7092 false,
7093 None,
7094 )
7095 .unwrap(),
7096 cx
7097 )
7098 .await
7099 .unwrap()
7100 .is_empty(),
7101 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7102 );
7103}
7104
7105#[gpui::test]
7106async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
7107 init_test(cx);
7108
7109 let search_query = "file";
7110
7111 let fs = FakeFs::new(cx.executor());
7112 fs.insert_tree(
7113 path!("/dir"),
7114 json!({
7115 "one.rs": r#"// Rust file one"#,
7116 "one.ts": r#"// TypeScript file one"#,
7117 "two.rs": r#"// Rust file two"#,
7118 "two.ts": r#"// TypeScript file two"#,
7119 }),
7120 )
7121 .await;
7122
7123 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7124 let path_style = PathStyle::local();
7125 let _buffer = project.update(cx, |project, cx| {
7126 project.create_local_buffer("file", None, false, cx)
7127 });
7128
7129 assert_eq!(
7130 search(
7131 &project,
7132 SearchQuery::text(
7133 search_query,
7134 false,
7135 true,
7136 false,
7137 Default::default(),
7138 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
7139 false,
7140 None,
7141 )
7142 .unwrap(),
7143 cx
7144 )
7145 .await
7146 .unwrap(),
7147 HashMap::from_iter([
7148 (path!("dir/one.rs").to_string(), vec![8..12]),
7149 (path!("dir/one.ts").to_string(), vec![14..18]),
7150 (path!("dir/two.rs").to_string(), vec![8..12]),
7151 (path!("dir/two.ts").to_string(), vec![14..18]),
7152 ]),
7153 "If no exclusions match, all files should be returned"
7154 );
7155
7156 assert_eq!(
7157 search(
7158 &project,
7159 SearchQuery::text(
7160 search_query,
7161 false,
7162 true,
7163 false,
7164 Default::default(),
7165 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
7166 false,
7167 None,
7168 )
7169 .unwrap(),
7170 cx
7171 )
7172 .await
7173 .unwrap(),
7174 HashMap::from_iter([
7175 (path!("dir/one.ts").to_string(), vec![14..18]),
7176 (path!("dir/two.ts").to_string(), vec![14..18]),
7177 ]),
7178 "Rust exclusion search should give only TypeScript files"
7179 );
7180
7181 assert_eq!(
7182 search(
7183 &project,
7184 SearchQuery::text(
7185 search_query,
7186 false,
7187 true,
7188 false,
7189 Default::default(),
7190 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
7191 false,
7192 None,
7193 )
7194 .unwrap(),
7195 cx
7196 )
7197 .await
7198 .unwrap(),
7199 HashMap::from_iter([
7200 (path!("dir/one.rs").to_string(), vec![8..12]),
7201 (path!("dir/two.rs").to_string(), vec![8..12]),
7202 ]),
7203 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7204 );
7205
7206 assert!(
7207 search(
7208 &project,
7209 SearchQuery::text(
7210 search_query,
7211 false,
7212 true,
7213 false,
7214 Default::default(),
7215 PathMatcher::new(
7216 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7217 PathStyle::local(),
7218 )
7219 .unwrap(),
7220 false,
7221 None,
7222 )
7223 .unwrap(),
7224 cx
7225 )
7226 .await
7227 .unwrap()
7228 .is_empty(),
7229 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7230 );
7231}
7232
7233#[gpui::test]
7234async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
7235 init_test(cx);
7236
7237 let search_query = "file";
7238
7239 let fs = FakeFs::new(cx.executor());
7240 fs.insert_tree(
7241 path!("/dir"),
7242 json!({
7243 "one.rs": r#"// Rust file one"#,
7244 "one.ts": r#"// TypeScript file one"#,
7245 "two.rs": r#"// Rust file two"#,
7246 "two.ts": r#"// TypeScript file two"#,
7247 }),
7248 )
7249 .await;
7250 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7251 assert!(
7252 search(
7253 &project,
7254 SearchQuery::text(
7255 search_query,
7256 false,
7257 true,
7258 false,
7259 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7260 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7261 false,
7262 None,
7263 )
7264 .unwrap(),
7265 cx
7266 )
7267 .await
7268 .unwrap()
7269 .is_empty(),
7270 "If both no exclusions and inclusions match, exclusions should win and return nothing"
7271 );
7272
7273 assert!(
7274 search(
7275 &project,
7276 SearchQuery::text(
7277 search_query,
7278 false,
7279 true,
7280 false,
7281 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7282 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7283 false,
7284 None,
7285 )
7286 .unwrap(),
7287 cx
7288 )
7289 .await
7290 .unwrap()
7291 .is_empty(),
7292 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
7293 );
7294
7295 assert!(
7296 search(
7297 &project,
7298 SearchQuery::text(
7299 search_query,
7300 false,
7301 true,
7302 false,
7303 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7304 .unwrap(),
7305 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7306 .unwrap(),
7307 false,
7308 None,
7309 )
7310 .unwrap(),
7311 cx
7312 )
7313 .await
7314 .unwrap()
7315 .is_empty(),
7316 "Non-matching inclusions and exclusions should not change that."
7317 );
7318
7319 assert_eq!(
7320 search(
7321 &project,
7322 SearchQuery::text(
7323 search_query,
7324 false,
7325 true,
7326 false,
7327 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7328 .unwrap(),
7329 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
7330 .unwrap(),
7331 false,
7332 None,
7333 )
7334 .unwrap(),
7335 cx
7336 )
7337 .await
7338 .unwrap(),
7339 HashMap::from_iter([
7340 (path!("dir/one.ts").to_string(), vec![14..18]),
7341 (path!("dir/two.ts").to_string(), vec![14..18]),
7342 ]),
7343 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
7344 );
7345}
7346
7347#[gpui::test]
7348async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
7349 init_test(cx);
7350
7351 let fs = FakeFs::new(cx.executor());
7352 fs.insert_tree(
7353 path!("/worktree-a"),
7354 json!({
7355 "haystack.rs": r#"// NEEDLE"#,
7356 "haystack.ts": r#"// NEEDLE"#,
7357 }),
7358 )
7359 .await;
7360 fs.insert_tree(
7361 path!("/worktree-b"),
7362 json!({
7363 "haystack.rs": r#"// NEEDLE"#,
7364 "haystack.ts": r#"// NEEDLE"#,
7365 }),
7366 )
7367 .await;
7368
7369 let path_style = PathStyle::local();
7370 let project = Project::test(
7371 fs.clone(),
7372 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
7373 cx,
7374 )
7375 .await;
7376
7377 assert_eq!(
7378 search(
7379 &project,
7380 SearchQuery::text(
7381 "NEEDLE",
7382 false,
7383 true,
7384 false,
7385 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
7386 Default::default(),
7387 true,
7388 None,
7389 )
7390 .unwrap(),
7391 cx
7392 )
7393 .await
7394 .unwrap(),
7395 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
7396 "should only return results from included worktree"
7397 );
7398 assert_eq!(
7399 search(
7400 &project,
7401 SearchQuery::text(
7402 "NEEDLE",
7403 false,
7404 true,
7405 false,
7406 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
7407 Default::default(),
7408 true,
7409 None,
7410 )
7411 .unwrap(),
7412 cx
7413 )
7414 .await
7415 .unwrap(),
7416 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
7417 "should only return results from included worktree"
7418 );
7419
7420 assert_eq!(
7421 search(
7422 &project,
7423 SearchQuery::text(
7424 "NEEDLE",
7425 false,
7426 true,
7427 false,
7428 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
7429 Default::default(),
7430 false,
7431 None,
7432 )
7433 .unwrap(),
7434 cx
7435 )
7436 .await
7437 .unwrap(),
7438 HashMap::from_iter([
7439 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
7440 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
7441 ]),
7442 "should return results from both worktrees"
7443 );
7444}
7445
7446#[gpui::test]
7447async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
7448 init_test(cx);
7449
7450 let fs = FakeFs::new(cx.background_executor.clone());
7451 fs.insert_tree(
7452 path!("/dir"),
7453 json!({
7454 ".git": {},
7455 ".gitignore": "**/target\n/node_modules\n",
7456 "target": {
7457 "index.txt": "index_key:index_value"
7458 },
7459 "node_modules": {
7460 "eslint": {
7461 "index.ts": "const eslint_key = 'eslint value'",
7462 "package.json": r#"{ "some_key": "some value" }"#,
7463 },
7464 "prettier": {
7465 "index.ts": "const prettier_key = 'prettier value'",
7466 "package.json": r#"{ "other_key": "other value" }"#,
7467 },
7468 },
7469 "package.json": r#"{ "main_key": "main value" }"#,
7470 }),
7471 )
7472 .await;
7473 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7474
7475 let query = "key";
7476 assert_eq!(
7477 search(
7478 &project,
7479 SearchQuery::text(
7480 query,
7481 false,
7482 false,
7483 false,
7484 Default::default(),
7485 Default::default(),
7486 false,
7487 None,
7488 )
7489 .unwrap(),
7490 cx
7491 )
7492 .await
7493 .unwrap(),
7494 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
7495 "Only one non-ignored file should have the query"
7496 );
7497
7498 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7499 let path_style = PathStyle::local();
7500 assert_eq!(
7501 search(
7502 &project,
7503 SearchQuery::text(
7504 query,
7505 false,
7506 false,
7507 true,
7508 Default::default(),
7509 Default::default(),
7510 false,
7511 None,
7512 )
7513 .unwrap(),
7514 cx
7515 )
7516 .await
7517 .unwrap(),
7518 HashMap::from_iter([
7519 (path!("dir/package.json").to_string(), vec![8..11]),
7520 (path!("dir/target/index.txt").to_string(), vec![6..9]),
7521 (
7522 path!("dir/node_modules/prettier/package.json").to_string(),
7523 vec![9..12]
7524 ),
7525 (
7526 path!("dir/node_modules/prettier/index.ts").to_string(),
7527 vec![15..18]
7528 ),
7529 (
7530 path!("dir/node_modules/eslint/index.ts").to_string(),
7531 vec![13..16]
7532 ),
7533 (
7534 path!("dir/node_modules/eslint/package.json").to_string(),
7535 vec![8..11]
7536 ),
7537 ]),
7538 "Unrestricted search with ignored directories should find every file with the query"
7539 );
7540
7541 let files_to_include =
7542 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
7543 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
7544 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7545 assert_eq!(
7546 search(
7547 &project,
7548 SearchQuery::text(
7549 query,
7550 false,
7551 false,
7552 true,
7553 files_to_include,
7554 files_to_exclude,
7555 false,
7556 None,
7557 )
7558 .unwrap(),
7559 cx
7560 )
7561 .await
7562 .unwrap(),
7563 HashMap::from_iter([(
7564 path!("dir/node_modules/prettier/package.json").to_string(),
7565 vec![9..12]
7566 )]),
7567 "With search including ignored prettier directory and excluding TS files, only one file should be found"
7568 );
7569}
7570
7571#[gpui::test]
7572async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
7573 init_test(cx);
7574
7575 let fs = FakeFs::new(cx.executor());
7576 fs.insert_tree(
7577 path!("/dir"),
7578 json!({
7579 "one.rs": "// ПРИВЕТ? привет!",
7580 "two.rs": "// ПРИВЕТ.",
7581 "three.rs": "// привет",
7582 }),
7583 )
7584 .await;
7585 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7586 let unicode_case_sensitive_query = SearchQuery::text(
7587 "привет",
7588 false,
7589 true,
7590 false,
7591 Default::default(),
7592 Default::default(),
7593 false,
7594 None,
7595 );
7596 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
7597 assert_eq!(
7598 search(&project, unicode_case_sensitive_query.unwrap(), cx)
7599 .await
7600 .unwrap(),
7601 HashMap::from_iter([
7602 (path!("dir/one.rs").to_string(), vec![17..29]),
7603 (path!("dir/three.rs").to_string(), vec![3..15]),
7604 ])
7605 );
7606
7607 let unicode_case_insensitive_query = SearchQuery::text(
7608 "привет",
7609 false,
7610 false,
7611 false,
7612 Default::default(),
7613 Default::default(),
7614 false,
7615 None,
7616 );
7617 assert_matches!(
7618 unicode_case_insensitive_query,
7619 Ok(SearchQuery::Regex { .. })
7620 );
7621 assert_eq!(
7622 search(&project, unicode_case_insensitive_query.unwrap(), cx)
7623 .await
7624 .unwrap(),
7625 HashMap::from_iter([
7626 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
7627 (path!("dir/two.rs").to_string(), vec![3..15]),
7628 (path!("dir/three.rs").to_string(), vec![3..15]),
7629 ])
7630 );
7631
7632 assert_eq!(
7633 search(
7634 &project,
7635 SearchQuery::text(
7636 "привет.",
7637 false,
7638 false,
7639 false,
7640 Default::default(),
7641 Default::default(),
7642 false,
7643 None,
7644 )
7645 .unwrap(),
7646 cx
7647 )
7648 .await
7649 .unwrap(),
7650 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
7651 );
7652}
7653
7654#[gpui::test]
7655async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7656 init_test(cx);
7657
7658 let fs = FakeFs::new(cx.executor());
7659 fs.insert_tree(
7660 "/one/two",
7661 json!({
7662 "three": {
7663 "a.txt": "",
7664 "four": {}
7665 },
7666 "c.rs": ""
7667 }),
7668 )
7669 .await;
7670
7671 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7672 project
7673 .update(cx, |project, cx| {
7674 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7675 project.create_entry((id, rel_path("b..")), true, cx)
7676 })
7677 .await
7678 .unwrap()
7679 .into_included()
7680 .unwrap();
7681
7682 assert_eq!(
7683 fs.paths(true),
7684 vec![
7685 PathBuf::from(path!("/")),
7686 PathBuf::from(path!("/one")),
7687 PathBuf::from(path!("/one/two")),
7688 PathBuf::from(path!("/one/two/c.rs")),
7689 PathBuf::from(path!("/one/two/three")),
7690 PathBuf::from(path!("/one/two/three/a.txt")),
7691 PathBuf::from(path!("/one/two/three/b..")),
7692 PathBuf::from(path!("/one/two/three/four")),
7693 ]
7694 );
7695}
7696
// Registers four fake language servers for the same `tsx` buffer and checks
// that a hover request fans out only to servers advertising hover
// capabilities, that an `Ok(None)` reply contributes nothing, and that the
// remaining responses are all collected.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // The first three servers advertise hover support; the fourth explicitly
    // does not, so it must never receive a hover request.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wait for each server to start and install a per-server hover handler:
    // two reply with a marked string, one replies with None, and the
    // no-capabilities server panics if it is ever asked.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Has the capability but returns no hover content.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Kick off the hover, then wait until every capable server has actually
    // been asked (each handler stream yields once per served request).
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
7851
// Checks that a hover response made up solely of empty / whitespace-only
// marked strings yields no hover blocks at all.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // One fake TypeScript server with hover capability.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Respond with only blank content: empty string, spaces, and newlines.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    // Dispatch the hover, wait until the fake server has served the request,
    // then assert the blank parts were all filtered out.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
7925
// Checks the `only` kinds filter for code actions: the fake server replies
// with both an organize-imports and a fix-all action, but a request scoped to
// SOURCE_ORGANIZE_IMPORTS must surface only the matching action.
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // One fake TypeScript server with code-action capability.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server always returns both actions, regardless of the request's filter.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request actions over the whole buffer, scoped to organize-imports only.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Wait until the fake server has served the request before collecting results.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the organize-imports action should survive the kinds filter.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
8004
8005#[gpui::test]
8006async fn test_code_actions_without_requested_kinds_do_not_send_only_filter(
8007 cx: &mut gpui::TestAppContext,
8008) {
8009 init_test(cx);
8010
8011 let fs = FakeFs::new(cx.executor());
8012 fs.insert_tree(
8013 path!("/dir"),
8014 json!({
8015 "a.ts": "a",
8016 }),
8017 )
8018 .await;
8019
8020 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8021
8022 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8023 language_registry.add(typescript_lang());
8024 let mut fake_language_servers = language_registry.register_fake_lsp(
8025 "TypeScript",
8026 FakeLspAdapter {
8027 capabilities: lsp::ServerCapabilities {
8028 code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
8029 lsp::CodeActionOptions {
8030 code_action_kinds: Some(vec![
8031 CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
8032 "source.doc".into(),
8033 ]),
8034 ..lsp::CodeActionOptions::default()
8035 },
8036 )),
8037 ..lsp::ServerCapabilities::default()
8038 },
8039 ..FakeLspAdapter::default()
8040 },
8041 );
8042
8043 let (buffer, _handle) = project
8044 .update(cx, |p, cx| {
8045 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
8046 })
8047 .await
8048 .unwrap();
8049 cx.executor().run_until_parked();
8050
8051 let fake_server = fake_language_servers
8052 .next()
8053 .await
8054 .expect("failed to get the language server");
8055
8056 let mut request_handled = fake_server.set_request_handler::<
8057 lsp::request::CodeActionRequest,
8058 _,
8059 _,
8060 >(move |params, _| async move {
8061 assert_eq!(
8062 params.context.only, None,
8063 "Code action requests without explicit kind filters should not send `context.only`"
8064 );
8065 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8066 lsp::CodeAction {
8067 title: "Add test".to_string(),
8068 kind: Some("source.addTest".into()),
8069 ..lsp::CodeAction::default()
8070 },
8071 )]))
8072 });
8073
8074 let code_actions_task = project.update(cx, |project, cx| {
8075 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8076 });
8077
8078 let () = request_handled
8079 .next()
8080 .await
8081 .expect("The code action request should have been triggered");
8082
8083 let code_actions = code_actions_task.await.unwrap().unwrap();
8084 assert_eq!(code_actions.len(), 1);
8085 assert_eq!(
8086 code_actions[0].lsp_action.action_kind(),
8087 Some("source.addTest".into())
8088 );
8089}
8090
8091#[gpui::test]
8092async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
8093 init_test(cx);
8094
8095 let fs = FakeFs::new(cx.executor());
8096 fs.insert_tree(
8097 path!("/dir"),
8098 json!({
8099 "a.tsx": "a",
8100 }),
8101 )
8102 .await;
8103
8104 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8105
8106 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8107 language_registry.add(tsx_lang());
8108 let language_server_names = [
8109 "TypeScriptServer",
8110 "TailwindServer",
8111 "ESLintServer",
8112 "NoActionsCapabilitiesServer",
8113 ];
8114
8115 let mut language_server_rxs = [
8116 language_registry.register_fake_lsp(
8117 "tsx",
8118 FakeLspAdapter {
8119 name: language_server_names[0],
8120 capabilities: lsp::ServerCapabilities {
8121 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8122 ..lsp::ServerCapabilities::default()
8123 },
8124 ..FakeLspAdapter::default()
8125 },
8126 ),
8127 language_registry.register_fake_lsp(
8128 "tsx",
8129 FakeLspAdapter {
8130 name: language_server_names[1],
8131 capabilities: lsp::ServerCapabilities {
8132 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8133 ..lsp::ServerCapabilities::default()
8134 },
8135 ..FakeLspAdapter::default()
8136 },
8137 ),
8138 language_registry.register_fake_lsp(
8139 "tsx",
8140 FakeLspAdapter {
8141 name: language_server_names[2],
8142 capabilities: lsp::ServerCapabilities {
8143 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8144 ..lsp::ServerCapabilities::default()
8145 },
8146 ..FakeLspAdapter::default()
8147 },
8148 ),
8149 language_registry.register_fake_lsp(
8150 "tsx",
8151 FakeLspAdapter {
8152 name: language_server_names[3],
8153 capabilities: lsp::ServerCapabilities {
8154 code_action_provider: None,
8155 ..lsp::ServerCapabilities::default()
8156 },
8157 ..FakeLspAdapter::default()
8158 },
8159 ),
8160 ];
8161
8162 let (buffer, _handle) = project
8163 .update(cx, |p, cx| {
8164 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
8165 })
8166 .await
8167 .unwrap();
8168 cx.executor().run_until_parked();
8169
8170 let mut servers_with_actions_requests = HashMap::default();
8171 for i in 0..language_server_names.len() {
8172 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
8173 panic!(
8174 "Failed to get language server #{i} with name {}",
8175 &language_server_names[i]
8176 )
8177 });
8178 let new_server_name = new_server.server.name();
8179
8180 assert!(
8181 !servers_with_actions_requests.contains_key(&new_server_name),
8182 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
8183 );
8184 match new_server_name.0.as_ref() {
8185 "TailwindServer" | "TypeScriptServer" => {
8186 servers_with_actions_requests.insert(
8187 new_server_name.clone(),
8188 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8189 move |_, _| {
8190 let name = new_server_name.clone();
8191 async move {
8192 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8193 lsp::CodeAction {
8194 title: format!("{name} code action"),
8195 ..lsp::CodeAction::default()
8196 },
8197 )]))
8198 }
8199 },
8200 ),
8201 );
8202 }
8203 "ESLintServer" => {
8204 servers_with_actions_requests.insert(
8205 new_server_name,
8206 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8207 |_, _| async move { Ok(None) },
8208 ),
8209 );
8210 }
8211 "NoActionsCapabilitiesServer" => {
8212 let _never_handled = new_server
8213 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
8214 panic!(
8215 "Should not call for code actions server with no corresponding capabilities"
8216 )
8217 });
8218 }
8219 unexpected => panic!("Unexpected server name: {unexpected}"),
8220 }
8221 }
8222
8223 let code_actions_task = project.update(cx, |project, cx| {
8224 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8225 });
8226
8227 // cx.run_until_parked();
8228 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
8229 |mut code_actions_request| async move {
8230 code_actions_request
8231 .next()
8232 .await
8233 .expect("All code actions requests should have been triggered")
8234 },
8235 ))
8236 .await;
8237 assert_eq!(
8238 vec!["TailwindServer code action", "TypeScriptServer code action"],
8239 code_actions_task
8240 .await
8241 .unwrap()
8242 .unwrap()
8243 .into_iter()
8244 .map(|code_action| code_action.lsp_action.title().to_owned())
8245 .sorted()
8246 .collect::<Vec<_>>(),
8247 "Should receive code actions responses from all related servers with hover capabilities"
8248 );
8249}
8250
8251#[gpui::test]
8252async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
8253 init_test(cx);
8254
8255 let fs = FakeFs::new(cx.executor());
8256 fs.insert_tree(
8257 "/dir",
8258 json!({
8259 "a.rs": "let a = 1;",
8260 "b.rs": "let b = 2;",
8261 "c.rs": "let c = 2;",
8262 }),
8263 )
8264 .await;
8265
8266 let project = Project::test(
8267 fs,
8268 [
8269 "/dir/a.rs".as_ref(),
8270 "/dir/b.rs".as_ref(),
8271 "/dir/c.rs".as_ref(),
8272 ],
8273 cx,
8274 )
8275 .await;
8276
8277 // check the initial state and get the worktrees
8278 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
8279 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8280 assert_eq!(worktrees.len(), 3);
8281
8282 let worktree_a = worktrees[0].read(cx);
8283 let worktree_b = worktrees[1].read(cx);
8284 let worktree_c = worktrees[2].read(cx);
8285
8286 // check they start in the right order
8287 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
8288 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
8289 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
8290
8291 (
8292 worktrees[0].clone(),
8293 worktrees[1].clone(),
8294 worktrees[2].clone(),
8295 )
8296 });
8297
8298 // move first worktree to after the second
8299 // [a, b, c] -> [b, a, c]
8300 project
8301 .update(cx, |project, cx| {
8302 let first = worktree_a.read(cx);
8303 let second = worktree_b.read(cx);
8304 project.move_worktree(first.id(), second.id(), cx)
8305 })
8306 .expect("moving first after second");
8307
8308 // check the state after moving
8309 project.update(cx, |project, cx| {
8310 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8311 assert_eq!(worktrees.len(), 3);
8312
8313 let first = worktrees[0].read(cx);
8314 let second = worktrees[1].read(cx);
8315 let third = worktrees[2].read(cx);
8316
8317 // check they are now in the right order
8318 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8319 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
8320 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8321 });
8322
8323 // move the second worktree to before the first
8324 // [b, a, c] -> [a, b, c]
8325 project
8326 .update(cx, |project, cx| {
8327 let second = worktree_a.read(cx);
8328 let first = worktree_b.read(cx);
8329 project.move_worktree(first.id(), second.id(), cx)
8330 })
8331 .expect("moving second before first");
8332
8333 // check the state after moving
8334 project.update(cx, |project, cx| {
8335 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8336 assert_eq!(worktrees.len(), 3);
8337
8338 let first = worktrees[0].read(cx);
8339 let second = worktrees[1].read(cx);
8340 let third = worktrees[2].read(cx);
8341
8342 // check they are now in the right order
8343 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8344 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8345 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8346 });
8347
8348 // move the second worktree to after the third
8349 // [a, b, c] -> [a, c, b]
8350 project
8351 .update(cx, |project, cx| {
8352 let second = worktree_b.read(cx);
8353 let third = worktree_c.read(cx);
8354 project.move_worktree(second.id(), third.id(), cx)
8355 })
8356 .expect("moving second after third");
8357
8358 // check the state after moving
8359 project.update(cx, |project, cx| {
8360 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8361 assert_eq!(worktrees.len(), 3);
8362
8363 let first = worktrees[0].read(cx);
8364 let second = worktrees[1].read(cx);
8365 let third = worktrees[2].read(cx);
8366
8367 // check they are now in the right order
8368 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8369 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8370 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
8371 });
8372
8373 // move the third worktree to before the second
8374 // [a, c, b] -> [a, b, c]
8375 project
8376 .update(cx, |project, cx| {
8377 let third = worktree_c.read(cx);
8378 let second = worktree_b.read(cx);
8379 project.move_worktree(third.id(), second.id(), cx)
8380 })
8381 .expect("moving third before second");
8382
8383 // check the state after moving
8384 project.update(cx, |project, cx| {
8385 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8386 assert_eq!(worktrees.len(), 3);
8387
8388 let first = worktrees[0].read(cx);
8389 let second = worktrees[1].read(cx);
8390 let third = worktrees[2].read(cx);
8391
8392 // check they are now in the right order
8393 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8394 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8395 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8396 });
8397
8398 // move the first worktree to after the third
8399 // [a, b, c] -> [b, c, a]
8400 project
8401 .update(cx, |project, cx| {
8402 let first = worktree_a.read(cx);
8403 let third = worktree_c.read(cx);
8404 project.move_worktree(first.id(), third.id(), cx)
8405 })
8406 .expect("moving first after third");
8407
8408 // check the state after moving
8409 project.update(cx, |project, cx| {
8410 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8411 assert_eq!(worktrees.len(), 3);
8412
8413 let first = worktrees[0].read(cx);
8414 let second = worktrees[1].read(cx);
8415 let third = worktrees[2].read(cx);
8416
8417 // check they are now in the right order
8418 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8419 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8420 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
8421 });
8422
8423 // move the third worktree to before the first
8424 // [b, c, a] -> [a, b, c]
8425 project
8426 .update(cx, |project, cx| {
8427 let third = worktree_a.read(cx);
8428 let first = worktree_b.read(cx);
8429 project.move_worktree(third.id(), first.id(), cx)
8430 })
8431 .expect("moving third before first");
8432
8433 // check the state after moving
8434 project.update(cx, |project, cx| {
8435 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8436 assert_eq!(worktrees.len(), 3);
8437
8438 let first = worktrees[0].read(cx);
8439 let second = worktrees[1].read(cx);
8440 let third = worktrees[2].read(cx);
8441
8442 // check they are now in the right order
8443 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8444 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8445 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8446 });
8447}
8448
// Verifies that an unstaged diff compares the buffer's contents against the
// git *index* (staged) text, and that its hunks are recomputed when the index
// changes out from under it.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The staged (index) version of the file.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // The on-disk version: one added comment line and one modified line
    // relative to the index.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Initially: the comment line is an addition, the println line a
    // modification, both relative to the index.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Rewrite the index: the comment is now staged and the println line is
    // absent from the staged text entirely.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    // After the index update settles, the diff collapses to a single "added"
    // hunk covering only the println line.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff
                .snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
8542
// Verifies that an uncommitted diff compares buffer contents against HEAD,
// tracks per-hunk staged state via the index (secondary hunks), and handles
// files that were deleted from the working copy.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD version of the file.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index version: the println change is already staged.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working-copy version: additionally adds a comment line (not staged).
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index but not on disk, so it reads
    // as a working-copy deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment still has a secondary (unstaged) hunk; the println
    // modification is fully staged, so it has none.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a deletion that is not yet staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the index drops the file, the deletion reads as fully staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8726
// Verifies the full staging lifecycle for uncommitted-diff hunks:
// optimistic "pending" state while the index write is in flight, settling to
// staged on success, rollback on index-write failure, and two concurrent
// staging operations both taking effect.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD contents; the working copy deletes "zero" and rewrites two lines.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so every hunk is unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The staged hunk is marked pending until the index write lands.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // It is optimistically marked pending even though the write will fail.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback also surfaces as a diff-changed event.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
9076
// Stage hunks while the FS events produced by earlier staging operations are
// still buffered, and verify that the transient `SecondaryHunkRemovalPending`
// statuses resolve to `NoSecondaryHunk` once the events are finally flushed.
// NOTE(review): the explicit seeds presumably pin a scheduling order that once
// failed — confirm before changing them.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index hold the full six-line file; the working copy deletes
    // "zero" and upper-cases "two" and "four", yielding three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. The status flips to `SecondaryHunkRemovalPending`
    // immediately (optimistically), before any FS event arrives.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged: every secondary status has settled to
    // `NoSecondaryHunk` despite the delayed, out-of-order event delivery.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
9270
// Fuzz staging: randomly stage/unstage hunks (with random yields between
// operations), then verify every hunk settles to the state implied by its
// *last* pending operation once the executor is fully parked.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage toggles; overridable via `OPERATIONS`.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line of the 30-line file is modified, producing 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as our model of the expected state: each operation
    // below records the pending status we expect the real diff to reach.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield randomly so operations interleave with background IO.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once parked, all pending operations must have completed: resolve the
    // model's pending statuses to their terminal equivalents.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
9390
9391#[gpui::test]
9392async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
9393 init_test(cx);
9394
9395 let committed_contents = r#"
9396 fn main() {
9397 println!("hello from HEAD");
9398 }
9399 "#
9400 .unindent();
9401 let file_contents = r#"
9402 fn main() {
9403 println!("hello from the working copy");
9404 }
9405 "#
9406 .unindent();
9407
9408 let fs = FakeFs::new(cx.background_executor.clone());
9409 fs.insert_tree(
9410 "/dir",
9411 json!({
9412 ".git": {},
9413 "src": {
9414 "main.rs": file_contents,
9415 }
9416 }),
9417 )
9418 .await;
9419
9420 fs.set_head_for_repo(
9421 Path::new("/dir/.git"),
9422 &[("src/main.rs", committed_contents.clone())],
9423 "deadbeef",
9424 );
9425 fs.set_index_for_repo(
9426 Path::new("/dir/.git"),
9427 &[("src/main.rs", committed_contents.clone())],
9428 );
9429
9430 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
9431
9432 let buffer = project
9433 .update(cx, |project, cx| {
9434 project.open_local_buffer("/dir/src/main.rs", cx)
9435 })
9436 .await
9437 .unwrap();
9438 let uncommitted_diff = project
9439 .update(cx, |project, cx| {
9440 project.open_uncommitted_diff(buffer.clone(), cx)
9441 })
9442 .await
9443 .unwrap();
9444
9445 cx.run_until_parked();
9446 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
9447 let snapshot = buffer.read(cx).snapshot();
9448 assert_hunks(
9449 uncommitted_diff.snapshot(cx).hunks(&snapshot),
9450 &snapshot,
9451 &uncommitted_diff.base_text_string(cx).unwrap(),
9452 &[(
9453 1..2,
9454 " println!(\"hello from HEAD\");\n",
9455 " println!(\"hello from the working copy\");\n",
9456 DiffHunkStatus {
9457 kind: DiffHunkStatusKind::Modified,
9458 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
9459 },
9460 )],
9461 );
9462 });
9463}
9464
// TODO: Should we test this on Windows also?
// Staging a hunk rewrites the file's index entry; verify the rewrite keeps
// the executable bit (mode 100755) instead of resetting it to 100644.
// Runs against a real git repository, hence `allow_parking`.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit "foo" with mode 755, then modify it in the working copy so
    // there is exactly one hunk to stage.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk (there is only one).
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // Ask git itself whether the staged entry changed mode.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check via the raw index listing.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
9549
// Maps project paths to their owning (repository, repo-relative path) pair
// across nested repositories, then verifies the mapping disappears when a
// repository's `.git` directory is deleted.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // `dir1` is a repo containing a nested repo at `dir1/deps/dep1`;
    // `c.txt` lives outside any repository.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project path, expected (repo work dir, repo-relative path));
        // files in the nested repo must resolve to the *inner* repository.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer repo's `.git` must unregister it, so its files no
    // longer resolve to any repository.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
9639
// A repository rooted at the user's home directory is only recognized when
// the worktree root *is* the home directory; a project opened on a subfolder
// of home must not pick it up.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Case 1: worktree rooted at ~/project — the repo at ~ is ignored.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Case 2: worktree rooted at ~ itself — the repo is recognized.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
9697
// End-to-end status over a real git repository: verifies cached statuses
// (modified / untracked / deleted, with diff stats) at startup, after a
// working-copy edit, and after committing plus deleting files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file and expect it to show up.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Commit everything outstanding, then delete one tracked and one
    // untracked file from the working copy.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
                diff_stat: Some(DiffStat {
                    added: 0,
                    deleted: 1,
                }),
            }]
        );
    });
}
9853
// Status post-processing: a file deleted in the index but present in HEAD
// and the working copy should surface as a combined `DA` status, and nested
// repositories must not leak into the outer repo's status list.
// NOTE(review): this test is `#[ignore]`d and the reason is not recorded
// here — confirm why before re-enabling.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // There are two repositories (outer project and nested `sub`); pick the
    // outer one by its work directory.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
                diff_stat: None,
            }]
        )
    });
}
9918
9919#[track_caller]
9920/// We merge lhs into rhs.
9921fn merge_pending_ops_snapshots(
9922 source: Vec<pending_op::PendingOps>,
9923 mut target: Vec<pending_op::PendingOps>,
9924) -> Vec<pending_op::PendingOps> {
9925 for s_ops in source {
9926 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9927 if ops.repo_path == s_ops.repo_path {
9928 Some(idx)
9929 } else {
9930 None
9931 }
9932 }) {
9933 let t_ops = &mut target[idx];
9934 for s_op in s_ops.ops {
9935 if let Some(op_idx) = t_ops
9936 .ops
9937 .iter()
9938 .zip(0..)
9939 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9940 {
9941 let t_op = &mut t_ops.ops[op_idx];
9942 match (s_op.job_status, t_op.job_status) {
9943 (pending_op::JobStatus::Running, _) => {}
9944 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9945 (s_st, t_st) if s_st == t_st => {}
9946 _ => unreachable!(),
9947 }
9948 } else {
9949 t_ops.ops.push(s_op);
9950 }
9951 }
9952 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9953 } else {
9954 target.push(s_ops);
9955 }
9956 }
9957 target
9958}
9959
// Stage/unstage a single untracked file repeatedly and verify that each
// operation produces a pending op with a monotonically increasing id that
// transitions Running -> Finished, and that the accumulated event stream
// contains all five ops in order.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every `PendingOpsChanged` snapshot into one merged tree so
    // we can assert on the full op history at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Op ids are expected to count up from 1, one per stage/unstage call.
    let mut id = 1u16;

    // Runs one stage or unstage, asserting the op is Running while the task
    // is in flight and Finished once it resolves.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event history must contain all five ops, alternating
    // Staged/Unstaged, all Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The last operation was a stage, so the file ends up added to the index.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10124
// Issue two staging operations for the same path back-to-back: the first
// (detached) op is expected to be superseded — reported as `Skipped` — while
// the second completes as `Finished`.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every `PendingOpsChanged` snapshot into one merged tree so
    // we can assert on the full op history at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage: detached, never awaited — should end up Skipped.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second stage for the same path: awaited (with a timeout so a hang
    // fails the test rather than stalling it) — should Finish.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file was staged exactly once; it shows as added in the index.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10234
// `stage_all`/`unstage_all` must produce pending ops for every affected
// path — including a file that was already individually staged — and the
// final `unstage_all` leaves both files untracked again.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every `PendingOpsChanged` snapshot into one merged tree so
    // we can assert on the full op history at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt individually, then stage everything, then unstage
    // everything. Note: stage_all does not re-stage the already-staged
    // a.txt, so each file ends up with exactly two ops.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After the final unstage_all, both files are untracked again.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
            ]
        );
    });
}
10365
10366#[gpui::test]
10367async fn test_repository_subfolder_git_status(
10368 executor: gpui::BackgroundExecutor,
10369 cx: &mut gpui::TestAppContext,
10370) {
10371 init_test(cx);
10372
10373 let fs = FakeFs::new(executor);
10374 fs.insert_tree(
10375 path!("/root"),
10376 json!({
10377 "my-repo": {
10378 ".git": {},
10379 "a.txt": "a",
10380 "sub-folder-1": {
10381 "sub-folder-2": {
10382 "c.txt": "cc",
10383 "d": {
10384 "e.txt": "eee"
10385 }
10386 },
10387 }
10388 },
10389 }),
10390 )
10391 .await;
10392
10393 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
10394 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
10395
10396 fs.set_status_for_repo(
10397 path!("/root/my-repo/.git").as_ref(),
10398 &[(E_TXT, FileStatus::Untracked)],
10399 );
10400
10401 let project = Project::test(
10402 fs.clone(),
10403 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
10404 cx,
10405 )
10406 .await;
10407
10408 project
10409 .update(cx, |project, cx| project.git_scans_complete(cx))
10410 .await;
10411 cx.run_until_parked();
10412
10413 let repository = project.read_with(cx, |project, cx| {
10414 project.repositories(cx).values().next().unwrap().clone()
10415 });
10416
10417 // Ensure that the git status is loaded correctly
10418 repository.read_with(cx, |repository, _cx| {
10419 assert_eq!(
10420 repository.work_directory_abs_path,
10421 Path::new(path!("/root/my-repo")).into()
10422 );
10423
10424 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
10425 assert_eq!(
10426 repository
10427 .status_for_path(&repo_path(E_TXT))
10428 .unwrap()
10429 .status,
10430 FileStatus::Untracked
10431 );
10432 });
10433
10434 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
10435 project
10436 .update(cx, |project, cx| project.git_scans_complete(cx))
10437 .await;
10438 cx.run_until_parked();
10439
10440 repository.read_with(cx, |repository, _cx| {
10441 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
10442 assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
10443 });
10444}
10445
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    // Create a real git repo with a single committed file.
    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Commit a conflicting edit to a.txt on a side branch, then go back to main,
    // diverge a.txt there, and cherry-pick the side-branch commit to force a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really entered the conflicted cherry-pick state.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository entity should now report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick is resolved, the tracked conflicts should be cleared.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
10528
10529#[gpui::test]
10530async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
10531 init_test(cx);
10532 let fs = FakeFs::new(cx.background_executor.clone());
10533 fs.insert_tree(
10534 path!("/root"),
10535 json!({
10536 ".git": {},
10537 ".gitignore": "*.txt\n",
10538 "a.xml": "<a></a>",
10539 "b.txt": "Some text"
10540 }),
10541 )
10542 .await;
10543
10544 fs.set_head_and_index_for_repo(
10545 path!("/root/.git").as_ref(),
10546 &[
10547 (".gitignore", "*.txt\n".into()),
10548 ("a.xml", "<a></a>".into()),
10549 ],
10550 );
10551
10552 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10553
10554 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10555 tree.flush_fs_events(cx).await;
10556 project
10557 .update(cx, |project, cx| project.git_scans_complete(cx))
10558 .await;
10559 cx.executor().run_until_parked();
10560
10561 let repository = project.read_with(cx, |project, cx| {
10562 project.repositories(cx).values().next().unwrap().clone()
10563 });
10564
10565 // One file is unmodified, the other is ignored.
10566 cx.read(|cx| {
10567 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
10568 assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
10569 });
10570
10571 // Change the gitignore, and stage the newly non-ignored file.
10572 fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
10573 .await
10574 .unwrap();
10575 fs.set_index_for_repo(
10576 Path::new(path!("/root/.git")),
10577 &[
10578 (".gitignore", "*.txt\n".into()),
10579 ("a.xml", "<a></a>".into()),
10580 ("b.txt", "Some text".into()),
10581 ],
10582 );
10583
10584 cx.executor().run_until_parked();
10585 cx.read(|cx| {
10586 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
10587 assert_entry_git_state(
10588 tree.read(cx),
10589 repository.read(cx),
10590 "b.txt",
10591 Some(StatusCode::Added),
10592 false,
10593 );
10594 });
10595}
10596
10597// NOTE:
10598// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
10599// a directory which some program has already open.
// This is a limitation of Windows itself.
10601// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
10602// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit `a`, then modify it on disk; `b` is never added, so it stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: modified `a`, untracked `b`, work dir at projects/project1.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the repository's work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should follow the rename: new work directory path, same
    // per-file statuses.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10678
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of Windows itself. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout the test.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    // `mut` because `git_stash` below takes the repo mutably.
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added to the index, so both are untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // a.txt and b.txt are now committed and clean, so they have no status entry.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete a tracked file and directory, extend the ignore rules, and commit
    // the new .gitignore.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // `mut` because the path is reassigned after the directory is renamed below.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // A file created inside a brand-new nested directory shows as untracked.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    // Rename the topmost ancestor directory of the untracked file.
    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The untracked status follows the file to its renamed location.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10903
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record every repository update and worktree-entry change so the test can
    // assert on exactly which events fired.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel entry is test plumbing, not a real FS change.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Explicitly load a file inside the ignored `target` dir so that its
    // ancestor directories get scanned into the worktree.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![RepositoryEvent::StatusesChanged,],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Emulate a build tool churning inside the ignored directory: create a
    // nested dir, write a temp file into it, then remove the whole dir again.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree snapshot should be back to its initial shape.
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
11062
11063// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
11064// to different timings/ordering of events.
11065#[ignore]
11066#[gpui::test]
11067async fn test_odd_events_for_ignored_dirs(
11068 executor: BackgroundExecutor,
11069 cx: &mut gpui::TestAppContext,
11070) {
11071 init_test(cx);
11072 let fs = FakeFs::new(executor);
11073 fs.insert_tree(
11074 path!("/root"),
11075 json!({
11076 ".git": {},
11077 ".gitignore": "**/target/",
11078 "src": {
11079 "main.rs": "fn main() {}",
11080 },
11081 "target": {
11082 "debug": {
11083 "foo.txt": "foo",
11084 "deps": {}
11085 }
11086 }
11087 }),
11088 )
11089 .await;
11090 fs.set_head_and_index_for_repo(
11091 path!("/root/.git").as_ref(),
11092 &[
11093 (".gitignore", "**/target/".into()),
11094 ("src/main.rs", "fn main() {}".into()),
11095 ],
11096 );
11097
11098 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11099 let repository_updates = Arc::new(Mutex::new(Vec::new()));
11100 let project_events = Arc::new(Mutex::new(Vec::new()));
11101 project.update(cx, |project, cx| {
11102 let repository_updates = repository_updates.clone();
11103 cx.subscribe(project.git_store(), move |_, _, e, _| {
11104 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
11105 repository_updates.lock().push(e.clone());
11106 }
11107 })
11108 .detach();
11109 let project_events = project_events.clone();
11110 cx.subscribe_self(move |_, e, _| {
11111 if let Event::WorktreeUpdatedEntries(_, updates) = e {
11112 project_events.lock().extend(
11113 updates
11114 .iter()
11115 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
11116 .filter(|(path, _)| path != "fs-event-sentinel"),
11117 );
11118 }
11119 })
11120 .detach();
11121 });
11122
11123 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11124 tree.update(cx, |tree, cx| {
11125 tree.load_file(rel_path("target/debug/foo.txt"), cx)
11126 })
11127 .await
11128 .unwrap();
11129 tree.flush_fs_events(cx).await;
11130 project
11131 .update(cx, |project, cx| project.git_scans_complete(cx))
11132 .await;
11133 cx.run_until_parked();
11134 tree.update(cx, |tree, _| {
11135 assert_eq!(
11136 tree.entries(true, 0)
11137 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
11138 .collect::<Vec<_>>(),
11139 vec![
11140 (rel_path(""), false),
11141 (rel_path(".gitignore"), false),
11142 (rel_path("src"), false),
11143 (rel_path("src/main.rs"), false),
11144 (rel_path("target"), true),
11145 (rel_path("target/debug"), true),
11146 (rel_path("target/debug/deps"), true),
11147 (rel_path("target/debug/foo.txt"), true),
11148 ]
11149 );
11150 });
11151
11152 assert_eq!(
11153 repository_updates.lock().drain(..).collect::<Vec<_>>(),
11154 vec![
11155 RepositoryEvent::BranchChanged,
11156 RepositoryEvent::StatusesChanged,
11157 RepositoryEvent::StatusesChanged,
11158 ],
11159 "Initial worktree scan should produce a repo update event"
11160 );
11161 assert_eq!(
11162 project_events.lock().drain(..).collect::<Vec<_>>(),
11163 vec![
11164 ("target".to_string(), PathChange::Loaded),
11165 ("target/debug".to_string(), PathChange::Loaded),
11166 ("target/debug/deps".to_string(), PathChange::Loaded),
11167 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
11168 ],
11169 "All non-ignored entries and all opened firs should be getting a project event",
11170 );
11171
11172 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
11173 // This may happen multiple times during a single flycheck, but once is enough for testing.
11174 fs.emit_fs_event("/root/target/debug/deps", None);
11175 tree.flush_fs_events(cx).await;
11176 project
11177 .update(cx, |project, cx| project.git_scans_complete(cx))
11178 .await;
11179 cx.executor().run_until_parked();
11180
11181 assert_eq!(
11182 repository_updates
11183 .lock()
11184 .iter()
11185 .cloned()
11186 .collect::<Vec<_>>(),
11187 Vec::new(),
11188 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
11189 );
11190 assert_eq!(
11191 project_events.lock().as_slice(),
11192 Vec::new(),
11193 "No further project events should happen, as only ignored dirs received FS events",
11194 );
11195}
11196
11197#[gpui::test]
11198async fn test_repos_in_invisible_worktrees(
11199 executor: BackgroundExecutor,
11200 cx: &mut gpui::TestAppContext,
11201) {
11202 init_test(cx);
11203 let fs = FakeFs::new(executor);
11204 fs.insert_tree(
11205 path!("/root"),
11206 json!({
11207 "dir1": {
11208 ".git": {},
11209 "dep1": {
11210 ".git": {},
11211 "src": {
11212 "a.txt": "",
11213 },
11214 },
11215 "b.txt": "",
11216 },
11217 }),
11218 )
11219 .await;
11220
11221 let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
11222 let _visible_worktree =
11223 project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11224 project
11225 .update(cx, |project, cx| project.git_scans_complete(cx))
11226 .await;
11227
11228 let repos = project.read_with(cx, |project, cx| {
11229 project
11230 .repositories(cx)
11231 .values()
11232 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
11233 .collect::<Vec<_>>()
11234 });
11235 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
11236
11237 let (_invisible_worktree, _) = project
11238 .update(cx, |project, cx| {
11239 project.worktree_store().update(cx, |worktree_store, cx| {
11240 worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
11241 })
11242 })
11243 .await
11244 .expect("failed to create worktree");
11245 project
11246 .update(cx, |project, cx| project.git_scans_complete(cx))
11247 .await;
11248
11249 let repos = project.read_with(cx, |project, cx| {
11250 project
11251 .repositories(cx)
11252 .values()
11253 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
11254 .collect::<Vec<_>>()
11255 });
11256 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
11257}
11258
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file-scan exclusions so that the `.git` directory itself appears
    // as a worktree entry (asserted at the end of this test).
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    // The worktree root is /root/tree; the .gitignore at /root sits in an
    // ancestor directory outside the worktree.
    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Explicitly refresh the ignored dir's entries, since ignored contents are
    // not loaded eagerly.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: the tracked file is clean and not ignored; the file named
    // in the ancestor .gitignore is not flagged as ignored here; the file in
    // ignored-dir (matched by the repo's own .gitignore) is ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new file and stage it, plus new files in the ancestor-ignored
    // and repo-ignored locations.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // The newly staged file is reported as Added.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The `.git` directory itself is treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
11399
// Verifies that linked git worktrees (a `.git` file containing a `gitdir:`
// pointer into `<repo>/.git/worktrees/...`) and submodules (pointer into
// `<repo>/.git/modules/...`) are each detected as their own repository, and
// that git state changes in them are picked up and reflected in file status.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories (main repo, linked worktree, submodule) should
    // have been discovered during the scan.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        // The buffer should resolve to the linked worktree's repository,
        // not to the enclosing `/project` repository.
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // Disk content "B" differs from the "b" in HEAD/index, so the file
    // should show up as modified in the working tree.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
11555
// Verifies that two project worktrees rooted inside the same git repository
// resolve to a single shared repository entry rather than one per worktree.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open the two children (not the repo root) as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Both worktrees share the ancestor repository at /root/project, so
    // exactly one repository should be reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
11602
// Verifies that saving a buffer under a new path (save-as) re-resolves its
// git diff bases: the unstaged diff should compare against the new path's
// index content, and the uncommitted diff against the new path's HEAD content.
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct marker strings so each assertion can identify which base text
    // (committed vs staged, file_1 vs file_2) a diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Make the buffer content distinct from every base text so hunks exist.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // A freshly-opened uncommitted diff should likewise use file_2's HEAD text.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11716
11717async fn search(
11718 project: &Entity<Project>,
11719 query: SearchQuery,
11720 cx: &mut gpui::TestAppContext,
11721) -> Result<HashMap<String, Vec<Range<usize>>>> {
11722 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11723 let mut results = HashMap::default();
11724 while let Ok(search_result) = search_rx.rx.recv().await {
11725 match search_result {
11726 SearchResult::Buffer { buffer, ranges } => {
11727 results.entry(buffer).or_insert(ranges);
11728 }
11729 SearchResult::LimitReached => {}
11730 }
11731 }
11732 Ok(results
11733 .into_iter()
11734 .map(|(buffer, ranges)| {
11735 buffer.update(cx, |buffer, cx| {
11736 let path = buffer
11737 .file()
11738 .unwrap()
11739 .full_path(cx)
11740 .to_string_lossy()
11741 .to_string();
11742 let ranges = ranges
11743 .into_iter()
11744 .map(|range| range.to_offset(buffer))
11745 .collect::<Vec<_>>();
11746 (path, ranges)
11747 })
11748 })
11749 .collect())
11750}
11751
// Verifies that reloading a buffer with a different encoding is undoable:
// undo restores the original encoding and text, redo re-applies the new
// encoding, and the buffer stays clean throughout (contents come from disk).
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    // Bytes 0x48 0x69 read as one UTF-16LE code unit (U+6948) are '楈'.
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    // Undo restores both the UTF-8 encoding and the original text.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    // Redo re-applies the UTF-16LE interpretation.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11815
// Verifies that `wait_for_initial_scan` resolves only after the worktree
// store's initial scan finishes, and that both repositories discovered
// during that scan have been created and registered in the `GitStore`.
#[gpui::test]
async fn test_initial_scan_complete(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                ".zed": {
                    "tasks.json": r#"[{"label": "task-a", "command": "echo a"}]"#
                },
                "src": { "main.rs": "" }
            },
            "b": {
                ".git": {},
                ".zed": {
                    "tasks.json": r#"[{"label": "task-b", "command": "echo b"}]"#
                },
                "src": { "lib.rs": "" }
            },
        }),
    )
    .await;

    // Record every `Repository` entity as it is created, so the number of
    // repositories produced by the scan can be asserted afterwards.
    let repos_created = Rc::new(RefCell::new(Vec::new()));
    let _observe = {
        let repos_created = repos_created.clone();
        cx.update(|cx| {
            cx.observe_new::<Repository>(move |repo, _, cx| {
                repos_created.borrow_mut().push(cx.entity().downgrade());
                let _ = repo;
            })
        })
    };

    let project = Project::test(
        fs.clone(),
        [path!("/root/a").as_ref(), path!("/root/b").as_ref()],
        cx,
    )
    .await;

    let scan_complete = project.read_with(cx, |project, cx| project.wait_for_initial_scan(cx));
    scan_complete.await;

    project.read_with(cx, |project, cx| {
        assert!(
            project.worktree_store().read(cx).initial_scan_completed(),
            "Expected initial scan to be completed after awaiting wait_for_initial_scan"
        );
    });

    let created_repos_len = repos_created.borrow().len();
    assert_eq!(
        created_repos_len, 2,
        "Expected 2 repositories to be created during scan, got {}",
        created_repos_len
    );

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repositories().len(),
            2,
            "Expected 2 repositories in GitStore"
        );
    });
}
11886
11887pub fn init_test(cx: &mut gpui::TestAppContext) {
11888 zlog::init_test();
11889
11890 cx.update(|cx| {
11891 let settings_store = SettingsStore::test(cx);
11892 cx.set_global(settings_store);
11893 release_channel::init(semver::Version::new(0, 0, 0), cx);
11894 });
11895}
11896
11897fn json_lang() -> Arc<Language> {
11898 Arc::new(Language::new(
11899 LanguageConfig {
11900 name: "JSON".into(),
11901 matcher: LanguageMatcher {
11902 path_suffixes: vec!["json".to_string()],
11903 ..Default::default()
11904 },
11905 ..Default::default()
11906 },
11907 None,
11908 ))
11909}
11910
11911fn js_lang() -> Arc<Language> {
11912 Arc::new(Language::new(
11913 LanguageConfig {
11914 name: "JavaScript".into(),
11915 matcher: LanguageMatcher {
11916 path_suffixes: vec!["js".to_string()],
11917 ..Default::default()
11918 },
11919 ..Default::default()
11920 },
11921 None,
11922 ))
11923}
11924
/// Builds a test-only "Python" language (no grammar) whose toolchain lister
/// reports a virtual environment for every `.venv` directory found — via the
/// given fake filesystem — in any ancestor of the queried subroot path.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                // Check `<worktree_root>/<ancestor>/.venv` on the fake fs.
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is unsupported by this fake; tests only exercise `list`.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // The fake toolchain requires no shell activation commands.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
11999
12000fn typescript_lang() -> Arc<Language> {
12001 Arc::new(Language::new(
12002 LanguageConfig {
12003 name: "TypeScript".into(),
12004 matcher: LanguageMatcher {
12005 path_suffixes: vec!["ts".to_string()],
12006 ..Default::default()
12007 },
12008 ..Default::default()
12009 },
12010 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
12011 ))
12012}
12013
12014fn tsx_lang() -> Arc<Language> {
12015 Arc::new(Language::new(
12016 LanguageConfig {
12017 name: "tsx".into(),
12018 matcher: LanguageMatcher {
12019 path_suffixes: vec!["tsx".to_string()],
12020 ..Default::default()
12021 },
12022 ..Default::default()
12023 },
12024 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
12025 ))
12026}
12027
12028fn get_all_tasks(
12029 project: &Entity<Project>,
12030 task_contexts: Arc<TaskContexts>,
12031 cx: &mut App,
12032) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
12033 let new_tasks = project.update(cx, |project, cx| {
12034 project.task_store().update(cx, |task_store, cx| {
12035 task_store.task_inventory().unwrap().update(cx, |this, cx| {
12036 this.used_and_current_resolved_tasks(task_contexts, cx)
12037 })
12038 })
12039 });
12040
12041 cx.background_spawn(async move {
12042 let (mut old, new) = new_tasks.await;
12043 old.extend(new);
12044 old
12045 })
12046}
12047
12048#[track_caller]
12049fn assert_entry_git_state(
12050 tree: &Worktree,
12051 repository: &Repository,
12052 path: &str,
12053 index_status: Option<StatusCode>,
12054 is_ignored: bool,
12055) {
12056 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
12057 let entry = tree
12058 .entry_for_path(&rel_path(path))
12059 .unwrap_or_else(|| panic!("entry {path} not found"));
12060 let status = repository
12061 .status_for_path(&repo_path(path))
12062 .map(|entry| entry.status);
12063 let expected = index_status.map(|index_status| {
12064 TrackedStatus {
12065 index_status,
12066 worktree_status: StatusCode::Unmodified,
12067 }
12068 .into()
12069 });
12070 assert_eq!(
12071 status, expected,
12072 "expected {path} to have git status: {expected:?}"
12073 );
12074 assert_eq!(
12075 entry.is_ignored, is_ignored,
12076 "expected {path} to have is_ignored: {is_ignored}"
12077 );
12078}
12079
12080#[track_caller]
12081fn git_init(path: &Path) -> git2::Repository {
12082 let mut init_opts = RepositoryInitOptions::new();
12083 init_opts.initial_head("main");
12084 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
12085}
12086
12087#[track_caller]
12088fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
12089 let path = path.as_ref();
12090 let mut index = repo.index().expect("Failed to get index");
12091 index.add_path(path).expect("Failed to add file");
12092 index.write().expect("Failed to write index");
12093}
12094
12095#[track_caller]
12096fn git_remove_index(path: &Path, repo: &git2::Repository) {
12097 let mut index = repo.index().expect("Failed to get index");
12098 index.remove_path(path).expect("Failed to add file");
12099 index.write().expect("Failed to write index");
12100}
12101
12102#[track_caller]
12103fn git_commit(msg: &'static str, repo: &git2::Repository) {
12104 use git2::Signature;
12105
12106 let signature = Signature::now("test", "test@zed.dev").unwrap();
12107 let oid = repo.index().unwrap().write_tree().unwrap();
12108 let tree = repo.find_tree(oid).unwrap();
12109 if let Ok(head) = repo.head() {
12110 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
12111
12112 let parent_commit = parent_obj.as_commit().unwrap();
12113
12114 repo.commit(
12115 Some("HEAD"),
12116 &signature,
12117 &signature,
12118 msg,
12119 &tree,
12120 &[parent_commit],
12121 )
12122 .expect("Failed to commit with parent");
12123 } else {
12124 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
12125 .expect("Failed to commit");
12126 }
12127}
12128
/// Cherry-picks `commit` onto the current HEAD.
/// Currently compiled out (`#[cfg(any())]` is always false); kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
12134
12135#[track_caller]
12136fn git_stash(repo: &mut git2::Repository) {
12137 use git2::Signature;
12138
12139 let signature = Signature::now("test", "test@zed.dev").unwrap();
12140 repo.stash_save(&signature, "N/A", None)
12141 .expect("Failed to stash");
12142}
12143
12144#[track_caller]
12145fn git_reset(offset: usize, repo: &git2::Repository) {
12146 let head = repo.head().expect("Couldn't get repo head");
12147 let object = head.peel(git2::ObjectType::Commit).unwrap();
12148 let commit = object.as_commit().unwrap();
12149 let new_head = commit
12150 .parents()
12151 .inspect(|parnet| {
12152 parnet.message();
12153 })
12154 .nth(offset)
12155 .expect("Not enough history");
12156 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
12157 .expect("Could not reset");
12158}
12159
/// Creates branch `name` pointing at the current HEAD commit.
/// Currently compiled out (`#[cfg(any())]` is always false); kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed: the panic message previously read "Failed to commit",
    // a copy-paste leftover from `git_commit`.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
12170
/// Points HEAD at the reference `name` and checks it out into the working tree.
/// Currently compiled out (`#[cfg(any())]` is always false); kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
12177
/// Collects the repository's status entries into a map of path → status flags.
/// Currently compiled out (`#[cfg(any())]` is always false); kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}
12187
// Verifies `find_project_path` resolution for absolute paths: paths inside a
// worktree map to that worktree (even for files that do not exist yet), and
// paths outside every worktree resolve to `None`.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root and id for the assertions below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // Resolution is path-based, not existence-based: a missing file
        // inside a worktree still yields a project path.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
12271
// Verifies repository bookkeeping as worktrees are removed: removing a
// worktree that shares a repository with another keeps the repository alive,
// the active repository falls over to a remaining one, and removing the last
// worktree leaves no active repository.
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Note: /root/b/script is a sub-worktree inside the /root/b repository.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Three worktrees, but only two distinct repositories (a and b).
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the nested worktree must not drop the /root/b repository,
    // which is still referenced by the /root/b worktree.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing the active repository's worktree should switch the active
    // repository to the remaining one.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the last worktree leaves no active repository at all.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
12384
// Verifies the optimistic-staging lifecycle of a diff hunk: it starts
// unstaged, is marked `SecondaryHunkRemovalPending` while a stage operation
// is in flight, becomes fully staged (`NoSecondaryHunk`) once staging
// completes, and disappears entirely after a simulated commit.
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // Mid-flight, the hunk must be optimistically marked as pending removal.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
12529
12530#[gpui::test]
12531async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
12532 init_test(cx);
12533
12534 // Configure read_only_files setting
12535 cx.update(|cx| {
12536 cx.update_global::<SettingsStore, _>(|store, cx| {
12537 store.update_user_settings(cx, |settings| {
12538 settings.project.worktree.read_only_files = Some(vec![
12539 "**/generated/**".to_string(),
12540 "**/*.gen.rs".to_string(),
12541 ]);
12542 });
12543 });
12544 });
12545
12546 let fs = FakeFs::new(cx.background_executor.clone());
12547 fs.insert_tree(
12548 path!("/root"),
12549 json!({
12550 "src": {
12551 "main.rs": "fn main() {}",
12552 "types.gen.rs": "// Generated file",
12553 },
12554 "generated": {
12555 "schema.rs": "// Auto-generated schema",
12556 }
12557 }),
12558 )
12559 .await;
12560
12561 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12562
12563 // Open a regular file - should be read-write
12564 let regular_buffer = project
12565 .update(cx, |project, cx| {
12566 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12567 })
12568 .await
12569 .unwrap();
12570
12571 regular_buffer.read_with(cx, |buffer, _| {
12572 assert!(!buffer.read_only(), "Regular file should not be read-only");
12573 });
12574
12575 // Open a file matching *.gen.rs pattern - should be read-only
12576 let gen_buffer = project
12577 .update(cx, |project, cx| {
12578 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
12579 })
12580 .await
12581 .unwrap();
12582
12583 gen_buffer.read_with(cx, |buffer, _| {
12584 assert!(
12585 buffer.read_only(),
12586 "File matching *.gen.rs pattern should be read-only"
12587 );
12588 });
12589
12590 // Open a file in generated directory - should be read-only
12591 let generated_buffer = project
12592 .update(cx, |project, cx| {
12593 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12594 })
12595 .await
12596 .unwrap();
12597
12598 generated_buffer.read_with(cx, |buffer, _| {
12599 assert!(
12600 buffer.read_only(),
12601 "File in generated directory should be read-only"
12602 );
12603 });
12604}
12605
12606#[gpui::test]
12607async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
12608 init_test(cx);
12609
12610 // Explicitly set read_only_files to empty (default behavior)
12611 cx.update(|cx| {
12612 cx.update_global::<SettingsStore, _>(|store, cx| {
12613 store.update_user_settings(cx, |settings| {
12614 settings.project.worktree.read_only_files = Some(vec![]);
12615 });
12616 });
12617 });
12618
12619 let fs = FakeFs::new(cx.background_executor.clone());
12620 fs.insert_tree(
12621 path!("/root"),
12622 json!({
12623 "src": {
12624 "main.rs": "fn main() {}",
12625 },
12626 "generated": {
12627 "schema.rs": "// Auto-generated schema",
12628 }
12629 }),
12630 )
12631 .await;
12632
12633 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12634
12635 // All files should be read-write when read_only_files is empty
12636 let main_buffer = project
12637 .update(cx, |project, cx| {
12638 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12639 })
12640 .await
12641 .unwrap();
12642
12643 main_buffer.read_with(cx, |buffer, _| {
12644 assert!(
12645 !buffer.read_only(),
12646 "Files should not be read-only when read_only_files is empty"
12647 );
12648 });
12649
12650 let generated_buffer = project
12651 .update(cx, |project, cx| {
12652 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12653 })
12654 .await
12655 .unwrap();
12656
12657 generated_buffer.read_with(cx, |buffer, _| {
12658 assert!(
12659 !buffer.read_only(),
12660 "Generated files should not be read-only when read_only_files is empty"
12661 );
12662 });
12663}
12664
12665#[gpui::test]
12666async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
12667 init_test(cx);
12668
12669 // Configure to make lock files read-only
12670 cx.update(|cx| {
12671 cx.update_global::<SettingsStore, _>(|store, cx| {
12672 store.update_user_settings(cx, |settings| {
12673 settings.project.worktree.read_only_files = Some(vec![
12674 "**/*.lock".to_string(),
12675 "**/package-lock.json".to_string(),
12676 ]);
12677 });
12678 });
12679 });
12680
12681 let fs = FakeFs::new(cx.background_executor.clone());
12682 fs.insert_tree(
12683 path!("/root"),
12684 json!({
12685 "Cargo.lock": "# Lock file",
12686 "Cargo.toml": "[package]",
12687 "package-lock.json": "{}",
12688 "package.json": "{}",
12689 }),
12690 )
12691 .await;
12692
12693 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12694
12695 // Cargo.lock should be read-only
12696 let cargo_lock = project
12697 .update(cx, |project, cx| {
12698 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
12699 })
12700 .await
12701 .unwrap();
12702
12703 cargo_lock.read_with(cx, |buffer, _| {
12704 assert!(buffer.read_only(), "Cargo.lock should be read-only");
12705 });
12706
12707 // Cargo.toml should be read-write
12708 let cargo_toml = project
12709 .update(cx, |project, cx| {
12710 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
12711 })
12712 .await
12713 .unwrap();
12714
12715 cargo_toml.read_with(cx, |buffer, _| {
12716 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
12717 });
12718
12719 // package-lock.json should be read-only
12720 let package_lock = project
12721 .update(cx, |project, cx| {
12722 project.open_local_buffer(path!("/root/package-lock.json"), cx)
12723 })
12724 .await
12725 .unwrap();
12726
12727 package_lock.read_with(cx, |buffer, _| {
12728 assert!(buffer.read_only(), "package-lock.json should be read-only");
12729 });
12730
12731 // package.json should be read-write
12732 let package_json = project
12733 .update(cx, |project, cx| {
12734 project.open_local_buffer(path!("/root/package.json"), cx)
12735 })
12736 .await
12737 .unwrap();
12738
12739 package_json.read_with(cx, |buffer, _| {
12740 assert!(!buffer.read_only(), "package.json should not be read-only");
12741 });
12742}
12743
/// Tests for the security-sensitive `disable_ai` setting. The intended
/// contract exercised here: once ANY settings layer sets `disable_ai: true`,
/// a lower-precedence layer must not be able to flip it back to `false`
/// (saturating-boolean semantics).
mod disable_ai_settings_tests {
    use gpui::TestAppContext;
    use project::*;
    use settings::{Settings, SettingsStore};

    /// Verifies that `disable_ai` saturates across the global and user
    /// settings layers: a `true` in either layer wins, regardless of which
    /// layer set it. Note the two update blocks below apply the same pair of
    /// values in opposite layer order on purpose.
    #[gpui::test]
    async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
        cx.update(|cx| {
            settings::init(cx);

            // Test 1: Default is false (AI enabled)
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Default should allow AI"
            );
        });

        // Serialized JSON payloads for the two possible values of the setting.
        let disable_true = serde_json::json!({
            "disable_ai": true
        })
        .to_string();
        let disable_false = serde_json::json!({
            "disable_ai": false
        })
        .to_string();

        // Test 2: user=false, global=true — the `true` must win.
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_user_settings(&disable_false, cx).unwrap();
            store.set_global_settings(&disable_true, cx).unwrap();
        });
        cx.update(|cx| {
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Local false cannot override global true"
            );
        });

        // Test 3: layers swapped — global=false, user=true — `true` still wins.
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_global_settings(&disable_false, cx).unwrap();
            store.set_user_settings(&disable_true, cx).unwrap();
        });

        cx.update(|cx| {
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Local false cannot override global true"
            );
        });
    }

    /// Verifies worktree-local (project-level) `disable_ai` settings:
    /// a local `true` disables AI (and is merged into the global value),
    /// a local `false` re-enables it when no higher layer disagrees, and a
    /// user-level `true` cannot be undone by a local `false`.
    #[gpui::test]
    async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
        use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
        use worktree::WorktreeId;

        cx.update(|cx| {
            settings::init(cx);

            // Default should allow AI
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Default should allow AI"
            );
        });

        // Build a settings location pointing at a "project" directory inside
        // a synthetic worktree; no real filesystem is involved.
        let worktree_id = WorktreeId::from_usize(1);
        let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
            std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
        };
        let project_path = rel_path("project");
        let settings_location = SettingsLocation {
            worktree_id,
            path: project_path.as_ref(),
        };

        // Test: Project-level disable_ai=true should disable AI for files in that project
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": true }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                settings.disable_ai,
                "Project-level disable_ai=true should disable AI for files in that project"
            );
            // Global should now also be true since project-level disable_ai is merged into global
            assert!(
                DisableAiSettings::get_global(cx).disable_ai,
                "Global setting should be affected by project-level disable_ai=true"
            );
        });

        // Test: Setting project-level to false should allow AI for that project
        // (no higher-precedence layer has disabled it at this point).
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": false }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                !settings.disable_ai,
                "Project-level disable_ai=false should allow AI"
            );
            // Global should also be false now
            assert!(
                !DisableAiSettings::get_global(cx).disable_ai,
                "Global setting should be false when project-level is false"
            );
        });

        // Test: User-level true + project-level false = AI disabled (saturation)
        let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.set_user_settings(&disable_true, cx).unwrap();
            store
                .set_local_settings(
                    worktree_id,
                    LocalSettingsPath::InWorktree(project_path.clone()),
                    LocalSettingsKind::Settings,
                    Some(r#"{ "disable_ai": false }"#),
                    cx,
                )
                .unwrap();
        });

        cx.update(|cx| {
            let settings = DisableAiSettings::get(Some(settings_location), cx);
            assert!(
                settings.disable_ai,
                "Project-level false cannot override user-level true (SaturatingBool)"
            );
        });
    }
}