1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::{FakeFs, PathEventKind};
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 TestAppContext, UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
45 LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
46 ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
47 language_settings::{LanguageSettings, LanguageSettingsContent},
48 markdown_lang, rust_lang, tree_sitter_typescript,
49};
50use lsp::{
51 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
52 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
53 Uri, WillRenameFiles, notification::DidRenameFiles,
54};
55use parking_lot::Mutex;
56use paths::{config_dir, global_gitignore_path, tasks_file};
57use postage::stream::Stream as _;
58use pretty_assertions::{assert_eq, assert_matches};
59use project::{
60 Event, TaskContexts,
61 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
62 search::{SearchQuery, SearchResult},
63 task_store::{TaskSettingsLocation, TaskStore},
64 *,
65};
66use rand::{Rng as _, rngs::StdRng};
67use serde_json::json;
68use settings::SettingsStore;
69#[cfg(not(windows))]
70use std::os;
71use std::{
72 cell::RefCell,
73 env, mem,
74 num::NonZeroU32,
75 ops::Range,
76 path::{Path, PathBuf},
77 rc::Rc,
78 str::FromStr,
79 sync::{Arc, OnceLock, atomic},
80 task::Poll,
81 time::Duration,
82};
83use sum_tree::SumTree;
84use task::{ResolvedTask, ShellKind, TaskContext};
85use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
86use unindent::Unindent as _;
87use util::{
88 TryFutureExt as _, assert_set_eq, maybe, path,
89 paths::{PathMatcher, PathStyle},
90 rel_path::{RelPath, rel_path},
91 test::{TempTree, marked_text_offsets},
92 uri,
93};
94use worktree::WorktreeModelHandle as _;
95
96#[gpui::test]
97async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
98 cx.executor().allow_parking();
99
100 let (tx, mut rx) = futures::channel::mpsc::unbounded();
101 let _thread = std::thread::spawn(move || {
102 #[cfg(not(target_os = "windows"))]
103 std::fs::metadata("/tmp").unwrap();
104 #[cfg(target_os = "windows")]
105 std::fs::metadata("C:/Windows").unwrap();
106 std::thread::sleep(Duration::from_millis(1000));
107 tx.unbounded_send(1).unwrap();
108 });
109 rx.next().await.unwrap();
110}
111
112#[gpui::test]
113async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
114 cx.executor().allow_parking();
115
116 let io_task = smol::unblock(move || {
117 println!("sleeping on thread {:?}", std::thread::current().id());
118 std::thread::sleep(Duration::from_millis(10));
119 1
120 });
121
122 let task = cx.foreground_executor().spawn(async move {
123 io_task.await;
124 });
125
126 task.await;
127}
128
129#[gpui::test]
130async fn test_default_session_work_dirs_prefers_directory_worktrees_over_single_file_parents(
131 cx: &mut gpui::TestAppContext,
132) {
133 init_test(cx);
134
135 let fs = FakeFs::new(cx.executor());
136 fs.insert_tree(
137 path!("/root"),
138 json!({
139 "dir-project": {
140 "src": {
141 "main.rs": "fn main() {}"
142 }
143 },
144 "single-file.rs": "fn helper() {}"
145 }),
146 )
147 .await;
148
149 let project = Project::test(
150 fs,
151 [
152 Path::new(path!("/root/single-file.rs")),
153 Path::new(path!("/root/dir-project")),
154 ],
155 cx,
156 )
157 .await;
158
159 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
160 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
161
162 assert_eq!(
163 ordered_paths,
164 vec![
165 PathBuf::from(path!("/root/dir-project")),
166 PathBuf::from(path!("/root")),
167 ]
168 );
169}
170
171#[gpui::test]
172async fn test_default_session_work_dirs_falls_back_to_home_for_empty_project(
173 cx: &mut gpui::TestAppContext,
174) {
175 init_test(cx);
176
177 let fs = FakeFs::new(cx.executor());
178 let project = Project::test(fs, [], cx).await;
179
180 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
181 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
182
183 assert_eq!(ordered_paths, vec![paths::home_dir().to_path_buf()]);
184}
185
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are opt-in for the user, and
// thus we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    // Verifies that worktrees handle symlinks: a project opened through a
    // symlinked root scans correctly, and a symlinked subdirectory exposes
    // the same underlying files (same inodes) as its target.
    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // Symlink the worktree root itself, and also create a symlinked
    // directory ("finnochio" -> "fennel") inside the tree.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Uses RealFs because OS-level symlinks aren't modeled by FakeFs.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        assert_eq!(tree.file_count(), 5);
        // The file reached through the symlinked directory and the file in
        // the real directory must resolve to the same inode.
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
236
237#[gpui::test]
238async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
239 init_test(cx);
240
241 let dir = TempTree::new(json!({
242 ".editorconfig": r#"
243 root = true
244 [*.rs]
245 indent_style = tab
246 indent_size = 3
247 end_of_line = lf
248 insert_final_newline = true
249 trim_trailing_whitespace = true
250 max_line_length = 120
251 [*.js]
252 tab_width = 10
253 max_line_length = off
254 "#,
255 ".zed": {
256 "settings.json": r#"{
257 "tab_size": 8,
258 "hard_tabs": false,
259 "ensure_final_newline_on_save": false,
260 "remove_trailing_whitespace_on_save": false,
261 "preferred_line_length": 64,
262 "soft_wrap": "editor_width",
263 }"#,
264 },
265 "a.rs": "fn a() {\n A\n}",
266 "b": {
267 ".editorconfig": r#"
268 [*.rs]
269 indent_size = 2
270 max_line_length = off,
271 "#,
272 "b.rs": "fn b() {\n B\n}",
273 },
274 "c.js": "def c\n C\nend",
275 "d": {
276 ".editorconfig": r#"
277 [*.rs]
278 indent_size = 1
279 "#,
280 "d.rs": "fn d() {\n D\n}",
281 },
282 "README.json": "tabs are better\n",
283 }));
284
285 let path = dir.path();
286 let fs = FakeFs::new(cx.executor());
287 fs.insert_tree_from_real_fs(path, path).await;
288 let project = Project::test(fs, [path], cx).await;
289
290 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
291 language_registry.add(js_lang());
292 language_registry.add(json_lang());
293 language_registry.add(rust_lang());
294
295 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
296
297 cx.executor().run_until_parked();
298
299 let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
300 let buffer = project
301 .update(cx, |project, cx| {
302 project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
303 })
304 .await
305 .unwrap();
306 cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
307 };
308
309 let settings_a = settings_for("a.rs", cx).await;
310 let settings_b = settings_for("b/b.rs", cx).await;
311 let settings_c = settings_for("c.js", cx).await;
312 let settings_d = settings_for("d/d.rs", cx).await;
313 let settings_readme = settings_for("README.json", cx).await;
314 // .editorconfig overrides .zed/settings
315 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
316 assert_eq!(settings_a.hard_tabs, true);
317 assert_eq!(settings_a.ensure_final_newline_on_save, true);
318 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
319 assert_eq!(settings_a.preferred_line_length, 120);
320
321 // .editorconfig in b/ overrides .editorconfig in root
322 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
323
324 // .editorconfig in subdirectory overrides .editorconfig in root
325 assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));
326
327 // "indent_size" is not set, so "tab_width" is used
328 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
329
330 // When max_line_length is "off", default to .zed/settings.json
331 assert_eq!(settings_b.preferred_line_length, 64);
332 assert_eq!(settings_c.preferred_line_length, 64);
333
334 // README.md should not be affected by .editorconfig's globe "*.rs"
335 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
336}
337
#[gpui::test]
async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies that .editorconfig files *outside* the worktree root (in
    // ancestor directories) are discovered and applied, with the nearest
    // config whose glob matches a file taking precedence.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "parent": {
                ".editorconfig": "[*.rs]\nindent_size = 2\n",
                "worktree": {
                    ".editorconfig": "[*.md]\nindent_size = 3\n",
                    "main.rs": "fn main() {}",
                    "README.md": "# README",
                    "other.txt": "other content",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    language_registry.add(markdown_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    // Helper: open a buffer by worktree-relative path and resolve its
    // effective language settings.
    let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
            })
            .await
            .unwrap();
        cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
    };

    let settings_rs = settings_for("main.rs", cx).await;
    let settings_md = settings_for("README.md", cx).await;
    let settings_txt = settings_for("other.txt", cx).await;

    // main.rs gets indent_size = 2 from parent's external .editorconfig
    assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));

    // README.md gets indent_size = 3 from internal worktree .editorconfig
    assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));

    // other.txt gets indent_size = 4 from grandparent's external .editorconfig
    assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
}
392
#[gpui::test]
async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
    // Verifies that a `root = true` .editorconfig *inside* the worktree stops
    // upward traversal: configs in ancestor directories above it are ignored.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "src": {
                ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("src/file.rs")), cx)
        })
        .await
        .unwrap();
    cx.update(|cx| {
        // src/file.rs gets indent_size = 2 from src's root config, NOT 99
        // from the worktree-level .editorconfig above it.
        let settings = LanguageSettings::for_buffer(buffer.read(cx), cx).into_owned();
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
    });
}
430
#[gpui::test]
async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
    // Verifies that a `root = true` .editorconfig at the worktree root stops
    // traversal into *external* ancestor directories.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "worktree": {
                ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
    });
}
471
#[gpui::test]
async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
    // Verifies that a `root = true` config in an *external* ancestor stops
    // traversal there: its own settings apply, but nothing above it does.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "parent": {
                ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
                "worktree": {
                    "file.rs": "fn main() {}",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
514
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    // Verifies that a single external .editorconfig in a shared parent
    // directory applies to multiple sibling worktrees in the same project.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let worktrees: Vec<_> = cx.update(|cx| project.read(cx).worktrees(cx).collect());
    assert_eq!(worktrees.len(), 2);

    // Each worktree has its own internal config, but the shared indent_size
    // comes from the parent config in both cases.
    for worktree in worktrees {
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
            })
            .await
            .unwrap();

        cx.update(|cx| {
            let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        });
    }
}
570
#[gpui::test]
async fn test_external_editorconfig_not_loaded_without_internal_config(
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that external (ancestor) .editorconfig files are only
    // consulted when the worktree itself contains a .editorconfig; without
    // one, default settings apply.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "worktree": {
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
        // because without an internal .editorconfig, external configs are not loaded
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
613
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    // Verifies that editing an *external* .editorconfig (outside the
    // worktree) is watched and causes buffer settings to refresh.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the external config on disk; the watcher should pick this up.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
677
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    // Verifies that a worktree added *after* project creation also discovers
    // external .editorconfig files in its ancestor directories.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Add a second worktree to the already-running project.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((new_worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
744
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    // Verifies that removing a worktree drops its editorconfig bookkeeping:
    // per-worktree state, loaded external configs, and filesystem watchers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    // Inspect the editorconfig store's internal state via its test hook.
    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
800
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    // Verifies reference-counting of a shared external .editorconfig:
    // removing one of two worktrees must keep the config and its watcher
    // alive for the remaining worktree.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    // Inspect the editorconfig store's internal state via its test hook.
    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_b.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
893
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    // Verifies that custom git hosting providers declared in a project's
    // .zed/settings.json are registered globally, and unregistered again
    // when the setting is removed.
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // The provider named "foo" from project settings should now be listed.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the project settings; the custom provider should be removed.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
958
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies project-local configuration loaded from `.zed` directories:
    // - a nested `.zed/settings.json` overrides the worktree-root one,
    // - tasks from both the nested and the root `.zed/tasks.json` are listed,
    // - scheduling a task and registering a global tasks file changes the
    //   order of the resolved task list (matching the assertions below).
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against this worktree's (default) task context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind of the worktree-root `.zed/tasks.json`.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("a/a.rs")), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("b/b.rs")), cx)
        })
        .await
        .unwrap();
    cx.update(|cx| {
        let settings_a = LanguageSettings::for_buffer(&buffer_a.read(cx), cx);
        let settings_b = LanguageSettings::for_buffer(&buffer_b.read(cx), cx);

        // a/a.rs uses the root .zed tab_size; b/b.rs uses b/.zed's override.
        assert_eq!(settings_a.tab_size.get(), 8);
        assert_eq!(settings_b.tab_size.get(), 2);
    });

    // Collect all resolved tasks as (source kind, label, args, env).
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root worktree task as recently scheduled, and register a
    // global tasks file containing one extra task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store()
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // After scheduling, the recently-used root task moves to the front and
    // the global task is appended last.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
1161
// Verifies that saving a local `.zed/tasks.json` containing an unknown task
// variable (`$ZED_FOO`) emits an `Event::Toast` whose link points at the
// tasks documentation.
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we had a chance to setup the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Shared flag flipped by the subscription below once the toast arrives.
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        // Only toasts carrying a link are matched; the assertions pin the
        // notification id prefix, the offending variable name, and the docs URL.
        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    // Let the file-change processing and event delivery run to completion.
    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1219
// Verifies that a task referencing `$ZED_WORKTREE_ROOT` resolves only when the
// `TaskContexts` carry a worktree context providing that variable: with only an
// active-item context the task list is empty, and with a worktree context the
// variable is substituted into the resolved command.
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the worktree scan and task file loading settle before querying tasks.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Query with an active item context only — no worktree context, so the
    // `ZED_WORKTREE_ROOT` variable cannot be substituted.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Same query, but now the active worktree context supplies
    // `VariableName::WorktreeRoot`, so the task should resolve.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    // The resolved command has `$ZED_WORKTREE_ROOT` substituted with "/dir".
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
1311
// Verifies per-subproject language server instances within a single worktree:
// two Python subprojects (each rooted by a `pyproject.toml` manifest) initially
// share one "ty" server, and activating a distinct toolchain for one subproject
// causes a second server instance (a new `LanguageServerId`) to be started for it.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    pub(crate) struct PyprojectTomlManifestProvider;

    // Minimal manifest provider: walks up from the queried path looking for a
    // `pyproject.toml`, returning the first ancestor directory that has one.
    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two sibling subprojects, each with its own `pyproject.toml` and `.venv`.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" server.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the same server instance…
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    // Discover toolchains for project-b; the manifest provider roots the query
    // at the subproject directory.
    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // Nothing is active yet before we explicitly activate a toolchain.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b only.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // After toolchain activation, project-b's buffer should be served by a
    // freshly-started server instance.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1513
// End-to-end exercise of language server lifecycle management: server startup
// on buffer open, capability-based buffer configuration, routing of
// didChange/didSave/didClose/didOpen notifications to the right server per
// language, server hand-off when a rename changes a file's extension
// (including diagnostic clearing and version reset), server restarts, and
// close notifications when the last buffer handle is dropped.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Rust server advertises completion triggers and save notifications so we
    // can check the buffer gets configured from its capabilities.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // JSON server uses a different trigger character to distinguish the two.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // The rust server only sees the test2.rs edit (not the Cargo.toml one).
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-extension rename is reported as close-then-open on the same server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared on the language change below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::min_max_range_for_buffer(buffer.remote_id()),
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1916
1917#[gpui::test]
1918async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1919 init_test(cx);
1920
1921 let settings_json_contents = json!({
1922 "languages": {
1923 "Rust": {
1924 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1925 }
1926 },
1927 "lsp": {
1928 "my_fake_lsp": {
1929 "binary": {
1930 // file exists, so this is treated as a relative path
1931 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1932 }
1933 },
1934 "lsp_on_path": {
1935 "binary": {
1936 // file doesn't exist, so it will fall back on PATH env var
1937 "path": path!("lsp_on_path.exe").to_string(),
1938 }
1939 }
1940 },
1941 });
1942
1943 let fs = FakeFs::new(cx.executor());
1944 fs.insert_tree(
1945 path!("/the-root"),
1946 json!({
1947 ".zed": {
1948 "settings.json": settings_json_contents.to_string(),
1949 },
1950 ".relative_path": {
1951 "to": {
1952 "my_fake_lsp.exe": "",
1953 },
1954 },
1955 "src": {
1956 "main.rs": "",
1957 }
1958 }),
1959 )
1960 .await;
1961
1962 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1963 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1964 language_registry.add(rust_lang());
1965
1966 let mut my_fake_lsp = language_registry.register_fake_lsp(
1967 "Rust",
1968 FakeLspAdapter {
1969 name: "my_fake_lsp",
1970 ..Default::default()
1971 },
1972 );
1973 let mut lsp_on_path = language_registry.register_fake_lsp(
1974 "Rust",
1975 FakeLspAdapter {
1976 name: "lsp_on_path",
1977 ..Default::default()
1978 },
1979 );
1980
1981 cx.run_until_parked();
1982
1983 // Start the language server by opening a buffer with a compatible file extension.
1984 project
1985 .update(cx, |project, cx| {
1986 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
1987 })
1988 .await
1989 .unwrap();
1990
1991 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
1992 assert_eq!(
1993 lsp_path.to_string_lossy(),
1994 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
1995 );
1996
1997 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
1998 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
1999}
2000
// Verifies that a `~/`-prefixed language server binary path in settings is
// expanded to the user's home directory before the server is spawned.
#[gpui::test]
async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let settings_json_contents = json!({
        "languages": {
            "Rust": {
                "language_servers": ["tilde_lsp"]
            }
        },
        "lsp": {
            "tilde_lsp": {
                "binary": {
                    // tilde prefix should be expanded to the home directory
                    "path": "~/.local/bin/rust-analyzer",
                }
            }
        },
    });

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".zed": {
                "settings.json": settings_json_contents.to_string(),
            },
            "src": {
                "main.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let mut tilde_lsp = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "tilde_lsp",
            ..Default::default()
        },
    );
    cx.run_until_parked();

    // Opening a Rust buffer triggers the server start with the configured binary.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
    let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
    assert_eq!(
        lsp_path, expected_path,
        "Tilde path should expand to home directory"
    );
}
2061
// Verifies that a filesystem Rescan event on a path watched via
// `workspace/didChangeWatchedFiles` is forwarded to the language server as a
// `FileChangeType::CHANGED` file event.
#[gpui::test]
async fn test_rescan_fs_change_is_reported_to_language_servers_as_changed(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, _lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Opening a Rust buffer starts the fake language server.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Register a watcher for Cargo.lock, then record every file event the
    // server receives.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(
            lsp::RegistrationParams {
                registrations: vec![lsp::Registration {
                    id: Default::default(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: serde_json::to_value(
                        lsp::DidChangeWatchedFilesRegistrationOptions {
                            watchers: vec![lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            }],
                        },
                    )
                    .ok(),
                }],
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
        }
    });

    // No events should have been delivered before the fs event is emitted.
    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);

    fs.emit_fs_event(path!("/the-root/Cargo.lock"), Some(PathEventKind::Rescan));
    cx.executor().run_until_parked();

    // The Rescan event is translated into a CHANGED notification for the watcher.
    assert_eq!(
        &*file_changes.lock(),
        &[lsp::FileEvent {
            uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
            typ: lsp::FileChangeType::CHANGED,
        }]
    );
}
2152
2153#[gpui::test]
2154async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2155 init_test(cx);
2156
2157 let fs = FakeFs::new(cx.executor());
2158 fs.insert_tree(
2159 path!("/the-root"),
2160 json!({
2161 ".gitignore": "target\n",
2162 "Cargo.lock": "",
2163 "src": {
2164 "a.rs": "",
2165 "b.rs": "",
2166 },
2167 "target": {
2168 "x": {
2169 "out": {
2170 "x.rs": ""
2171 }
2172 },
2173 "y": {
2174 "out": {
2175 "y.rs": "",
2176 }
2177 },
2178 "z": {
2179 "out": {
2180 "z.rs": ""
2181 }
2182 }
2183 }
2184 }),
2185 )
2186 .await;
2187 fs.insert_tree(
2188 path!("/the-registry"),
2189 json!({
2190 "dep1": {
2191 "src": {
2192 "dep1.rs": "",
2193 }
2194 },
2195 "dep2": {
2196 "src": {
2197 "dep2.rs": "",
2198 }
2199 },
2200 }),
2201 )
2202 .await;
2203 fs.insert_tree(
2204 path!("/the/stdlib"),
2205 json!({
2206 "LICENSE": "",
2207 "src": {
2208 "string.rs": "",
2209 }
2210 }),
2211 )
2212 .await;
2213
2214 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2215 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2216 (project.languages().clone(), project.lsp_store())
2217 });
2218 language_registry.add(rust_lang());
2219 let mut fake_servers = language_registry.register_fake_lsp(
2220 "Rust",
2221 FakeLspAdapter {
2222 name: "the-language-server",
2223 ..Default::default()
2224 },
2225 );
2226
2227 cx.executor().run_until_parked();
2228
2229 // Start the language server by opening a buffer with a compatible file extension.
2230 project
2231 .update(cx, |project, cx| {
2232 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2233 })
2234 .await
2235 .unwrap();
2236
2237 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2238 project.update(cx, |project, cx| {
2239 let worktree = project.worktrees(cx).next().unwrap();
2240 assert_eq!(
2241 worktree
2242 .read(cx)
2243 .snapshot()
2244 .entries(true, 0)
2245 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2246 .collect::<Vec<_>>(),
2247 &[
2248 ("", false),
2249 (".gitignore", false),
2250 ("Cargo.lock", false),
2251 ("src", false),
2252 ("src/a.rs", false),
2253 ("src/b.rs", false),
2254 ("target", true),
2255 ]
2256 );
2257 });
2258
2259 let prev_read_dir_count = fs.read_dir_call_count();
2260
2261 let fake_server = fake_servers.next().await.unwrap();
2262 cx.executor().run_until_parked();
2263 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2264 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2265 id
2266 });
2267
2268 // Simulate jumping to a definition in a dependency outside of the worktree.
2269 let _out_of_worktree_buffer = project
2270 .update(cx, |project, cx| {
2271 project.open_local_buffer_via_lsp(
2272 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2273 server_id,
2274 cx,
2275 )
2276 })
2277 .await
2278 .unwrap();
2279
2280 // Keep track of the FS events reported to the language server.
2281 let file_changes = Arc::new(Mutex::new(Vec::new()));
2282 fake_server
2283 .request::<lsp::request::RegisterCapability>(
2284 lsp::RegistrationParams {
2285 registrations: vec![lsp::Registration {
2286 id: Default::default(),
2287 method: "workspace/didChangeWatchedFiles".to_string(),
2288 register_options: serde_json::to_value(
2289 lsp::DidChangeWatchedFilesRegistrationOptions {
2290 watchers: vec![
2291 lsp::FileSystemWatcher {
2292 glob_pattern: lsp::GlobPattern::String(
2293 path!("/the-root/Cargo.toml").to_string(),
2294 ),
2295 kind: None,
2296 },
2297 lsp::FileSystemWatcher {
2298 glob_pattern: lsp::GlobPattern::String(
2299 path!("/the-root/src/*.{rs,c}").to_string(),
2300 ),
2301 kind: None,
2302 },
2303 lsp::FileSystemWatcher {
2304 glob_pattern: lsp::GlobPattern::String(
2305 path!("/the-root/target/y/**/*.rs").to_string(),
2306 ),
2307 kind: None,
2308 },
2309 lsp::FileSystemWatcher {
2310 glob_pattern: lsp::GlobPattern::String(
2311 path!("/the/stdlib/src/**/*.rs").to_string(),
2312 ),
2313 kind: None,
2314 },
2315 lsp::FileSystemWatcher {
2316 glob_pattern: lsp::GlobPattern::String(
2317 path!("**/Cargo.lock").to_string(),
2318 ),
2319 kind: None,
2320 },
2321 ],
2322 },
2323 )
2324 .ok(),
2325 }],
2326 },
2327 DEFAULT_LSP_REQUEST_TIMEOUT,
2328 )
2329 .await
2330 .into_response()
2331 .unwrap();
2332 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2333 let file_changes = file_changes.clone();
2334 move |params, _| {
2335 let mut file_changes = file_changes.lock();
2336 file_changes.extend(params.changes);
2337 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2338 }
2339 });
2340
2341 cx.executor().run_until_parked();
2342 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2343 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2344
2345 let mut new_watched_paths = fs.watched_paths();
2346 new_watched_paths.retain(|path| {
2347 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2348 });
2349 assert_eq!(
2350 &new_watched_paths,
2351 &[
2352 Path::new(path!("/the-root")),
2353 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2354 Path::new(path!("/the/stdlib/src"))
2355 ]
2356 );
2357
2358 // Now the language server has asked us to watch an ignored directory path,
2359 // so we recursively load it.
2360 project.update(cx, |project, cx| {
2361 let worktree = project.visible_worktrees(cx).next().unwrap();
2362 assert_eq!(
2363 worktree
2364 .read(cx)
2365 .snapshot()
2366 .entries(true, 0)
2367 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2368 .collect::<Vec<_>>(),
2369 &[
2370 ("", false),
2371 (".gitignore", false),
2372 ("Cargo.lock", false),
2373 ("src", false),
2374 ("src/a.rs", false),
2375 ("src/b.rs", false),
2376 ("target", true),
2377 ("target/x", true),
2378 ("target/y", true),
2379 ("target/y/out", true),
2380 ("target/y/out/y.rs", true),
2381 ("target/z", true),
2382 ]
2383 );
2384 });
2385
2386 // Perform some file system mutations, two of which match the watched patterns,
2387 // and one of which does not.
2388 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2389 .await
2390 .unwrap();
2391 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2392 .await
2393 .unwrap();
2394 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2395 .await
2396 .unwrap();
2397 fs.create_file(
2398 path!("/the-root/target/x/out/x2.rs").as_ref(),
2399 Default::default(),
2400 )
2401 .await
2402 .unwrap();
2403 fs.create_file(
2404 path!("/the-root/target/y/out/y2.rs").as_ref(),
2405 Default::default(),
2406 )
2407 .await
2408 .unwrap();
2409 fs.save(
2410 path!("/the-root/Cargo.lock").as_ref(),
2411 &"".into(),
2412 Default::default(),
2413 )
2414 .await
2415 .unwrap();
2416 fs.save(
2417 path!("/the-stdlib/LICENSE").as_ref(),
2418 &"".into(),
2419 Default::default(),
2420 )
2421 .await
2422 .unwrap();
2423 fs.save(
2424 path!("/the/stdlib/src/string.rs").as_ref(),
2425 &"".into(),
2426 Default::default(),
2427 )
2428 .await
2429 .unwrap();
2430
2431 // The language server receives events for the FS mutations that match its watch patterns.
2432 cx.executor().run_until_parked();
2433 assert_eq!(
2434 &*file_changes.lock(),
2435 &[
2436 lsp::FileEvent {
2437 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2438 typ: lsp::FileChangeType::CHANGED,
2439 },
2440 lsp::FileEvent {
2441 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2442 typ: lsp::FileChangeType::DELETED,
2443 },
2444 lsp::FileEvent {
2445 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2446 typ: lsp::FileChangeType::CREATED,
2447 },
2448 lsp::FileEvent {
2449 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2450 typ: lsp::FileChangeType::CREATED,
2451 },
2452 lsp::FileEvent {
2453 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2454 typ: lsp::FileChangeType::CHANGED,
2455 },
2456 ]
2457 );
2458}
2459
2460#[gpui::test]
2461async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2462 init_test(cx);
2463
2464 let fs = FakeFs::new(cx.executor());
2465 fs.insert_tree(
2466 path!("/dir"),
2467 json!({
2468 "a.rs": "let a = 1;",
2469 "b.rs": "let b = 2;"
2470 }),
2471 )
2472 .await;
2473
2474 let project = Project::test(
2475 fs,
2476 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2477 cx,
2478 )
2479 .await;
2480 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2481
2482 let buffer_a = project
2483 .update(cx, |project, cx| {
2484 project.open_local_buffer(path!("/dir/a.rs"), cx)
2485 })
2486 .await
2487 .unwrap();
2488 let buffer_b = project
2489 .update(cx, |project, cx| {
2490 project.open_local_buffer(path!("/dir/b.rs"), cx)
2491 })
2492 .await
2493 .unwrap();
2494
2495 lsp_store.update(cx, |lsp_store, cx| {
2496 lsp_store
2497 .update_diagnostics(
2498 LanguageServerId(0),
2499 lsp::PublishDiagnosticsParams {
2500 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2501 version: None,
2502 diagnostics: vec![lsp::Diagnostic {
2503 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2504 severity: Some(lsp::DiagnosticSeverity::ERROR),
2505 message: "error 1".to_string(),
2506 ..Default::default()
2507 }],
2508 },
2509 None,
2510 DiagnosticSourceKind::Pushed,
2511 &[],
2512 cx,
2513 )
2514 .unwrap();
2515 lsp_store
2516 .update_diagnostics(
2517 LanguageServerId(0),
2518 lsp::PublishDiagnosticsParams {
2519 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2520 version: None,
2521 diagnostics: vec![lsp::Diagnostic {
2522 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2523 severity: Some(DiagnosticSeverity::WARNING),
2524 message: "error 2".to_string(),
2525 ..Default::default()
2526 }],
2527 },
2528 None,
2529 DiagnosticSourceKind::Pushed,
2530 &[],
2531 cx,
2532 )
2533 .unwrap();
2534 });
2535
2536 buffer_a.update(cx, |buffer, _| {
2537 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2538 assert_eq!(
2539 chunks
2540 .iter()
2541 .map(|(s, d)| (s.as_str(), *d))
2542 .collect::<Vec<_>>(),
2543 &[
2544 ("let ", None),
2545 ("a", Some(DiagnosticSeverity::ERROR)),
2546 (" = 1;", None),
2547 ]
2548 );
2549 });
2550 buffer_b.update(cx, |buffer, _| {
2551 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2552 assert_eq!(
2553 chunks
2554 .iter()
2555 .map(|(s, d)| (s.as_str(), *d))
2556 .collect::<Vec<_>>(),
2557 &[
2558 ("let ", None),
2559 ("b", Some(DiagnosticSeverity::WARNING)),
2560 (" = 2;", None),
2561 ]
2562 );
2563 });
2564}
2565
2566#[gpui::test]
2567async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2568 init_test(cx);
2569
2570 let fs = FakeFs::new(cx.executor());
2571 fs.insert_tree(
2572 path!("/root"),
2573 json!({
2574 "dir": {
2575 ".git": {
2576 "HEAD": "ref: refs/heads/main",
2577 },
2578 ".gitignore": "b.rs",
2579 "a.rs": "let a = 1;",
2580 "b.rs": "let b = 2;",
2581 },
2582 "other.rs": "let b = c;"
2583 }),
2584 )
2585 .await;
2586
2587 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2588 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2589 let (worktree, _) = project
2590 .update(cx, |project, cx| {
2591 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2592 })
2593 .await
2594 .unwrap();
2595 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2596
2597 let (worktree, _) = project
2598 .update(cx, |project, cx| {
2599 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2600 })
2601 .await
2602 .unwrap();
2603 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2604
2605 let server_id = LanguageServerId(0);
2606 lsp_store.update(cx, |lsp_store, cx| {
2607 lsp_store
2608 .update_diagnostics(
2609 server_id,
2610 lsp::PublishDiagnosticsParams {
2611 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2612 version: None,
2613 diagnostics: vec![lsp::Diagnostic {
2614 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2615 severity: Some(lsp::DiagnosticSeverity::ERROR),
2616 message: "unused variable 'b'".to_string(),
2617 ..Default::default()
2618 }],
2619 },
2620 None,
2621 DiagnosticSourceKind::Pushed,
2622 &[],
2623 cx,
2624 )
2625 .unwrap();
2626 lsp_store
2627 .update_diagnostics(
2628 server_id,
2629 lsp::PublishDiagnosticsParams {
2630 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2631 version: None,
2632 diagnostics: vec![lsp::Diagnostic {
2633 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2634 severity: Some(lsp::DiagnosticSeverity::ERROR),
2635 message: "unknown variable 'c'".to_string(),
2636 ..Default::default()
2637 }],
2638 },
2639 None,
2640 DiagnosticSourceKind::Pushed,
2641 &[],
2642 cx,
2643 )
2644 .unwrap();
2645 });
2646
2647 let main_ignored_buffer = project
2648 .update(cx, |project, cx| {
2649 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2650 })
2651 .await
2652 .unwrap();
2653 main_ignored_buffer.update(cx, |buffer, _| {
2654 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2655 assert_eq!(
2656 chunks
2657 .iter()
2658 .map(|(s, d)| (s.as_str(), *d))
2659 .collect::<Vec<_>>(),
2660 &[
2661 ("let ", None),
2662 ("b", Some(DiagnosticSeverity::ERROR)),
2663 (" = 2;", None),
2664 ],
2665 "Gigitnored buffers should still get in-buffer diagnostics",
2666 );
2667 });
2668 let other_buffer = project
2669 .update(cx, |project, cx| {
2670 project.open_buffer((other_worktree_id, rel_path("")), cx)
2671 })
2672 .await
2673 .unwrap();
2674 other_buffer.update(cx, |buffer, _| {
2675 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2676 assert_eq!(
2677 chunks
2678 .iter()
2679 .map(|(s, d)| (s.as_str(), *d))
2680 .collect::<Vec<_>>(),
2681 &[
2682 ("let b = ", None),
2683 ("c", Some(DiagnosticSeverity::ERROR)),
2684 (";", None),
2685 ],
2686 "Buffers from hidden projects should still get in-buffer diagnostics"
2687 );
2688 });
2689
2690 project.update(cx, |project, cx| {
2691 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2692 assert_eq!(
2693 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2694 vec![(
2695 ProjectPath {
2696 worktree_id: main_worktree_id,
2697 path: rel_path("b.rs").into(),
2698 },
2699 server_id,
2700 DiagnosticSummary {
2701 error_count: 1,
2702 warning_count: 0,
2703 }
2704 )]
2705 );
2706 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2707 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2708 });
2709}
2710
#[gpui::test]
// Verifies the project event sequence around a disk-based diagnostics pass:
// server added -> diagnostics started -> diagnostics updated -> diagnostics
// finished, and that publishing empty diagnostics twice in a row only emits
// one update event.
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Progress reported under this token is treated as a disk-based
    // diagnostics pass (see `disk_based_diagnostics_progress_token` below).
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Collect project events from here on; assertions below consume them in order.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token emits DiskBasedDiagnosticsStarted.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic for a.rs emits DiagnosticsUpdated for that path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending progress under the token emits DiskBasedDiagnosticsFinished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is present in the buffer at the reported range.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Second empty publish: no further event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2846
#[gpui::test]
// Verifies that restarting a language server mid-diagnostics-pass emits the
// expected Removed/Added/Registered/Started events for the new server, and
// that the new server's pass can finish even though the old one never ended.
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Note: the old server was id 0; the replacement gets id 1.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running a disk-based pass.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2948
2949#[gpui::test]
2950async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2951 init_test(cx);
2952
2953 let fs = FakeFs::new(cx.executor());
2954 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2955
2956 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2957
2958 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2959 language_registry.add(rust_lang());
2960 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2961
2962 let (buffer, _) = project
2963 .update(cx, |project, cx| {
2964 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2965 })
2966 .await
2967 .unwrap();
2968
2969 // Publish diagnostics
2970 let fake_server = fake_servers.next().await.unwrap();
2971 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2972 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2973 version: None,
2974 diagnostics: vec![lsp::Diagnostic {
2975 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2976 severity: Some(lsp::DiagnosticSeverity::ERROR),
2977 message: "the message".to_string(),
2978 ..Default::default()
2979 }],
2980 });
2981
2982 cx.executor().run_until_parked();
2983 buffer.update(cx, |buffer, _| {
2984 assert_eq!(
2985 buffer
2986 .snapshot()
2987 .diagnostics_in_range::<_, usize>(0..1, false)
2988 .map(|entry| entry.diagnostic.message.clone())
2989 .collect::<Vec<_>>(),
2990 ["the message".to_string()]
2991 );
2992 });
2993 project.update(cx, |project, cx| {
2994 assert_eq!(
2995 project.diagnostic_summary(false, cx),
2996 DiagnosticSummary {
2997 error_count: 1,
2998 warning_count: 0,
2999 }
3000 );
3001 });
3002
3003 project.update(cx, |project, cx| {
3004 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3005 });
3006
3007 // The diagnostics are cleared.
3008 cx.executor().run_until_parked();
3009 buffer.update(cx, |buffer, _| {
3010 assert_eq!(
3011 buffer
3012 .snapshot()
3013 .diagnostics_in_range::<_, usize>(0..1, false)
3014 .map(|entry| entry.diagnostic.message.clone())
3015 .collect::<Vec<_>>(),
3016 Vec::<String>::new(),
3017 );
3018 });
3019 project.update(cx, |project, cx| {
3020 assert_eq!(
3021 project.diagnostic_summary(false, cx),
3022 DiagnosticSummary {
3023 error_count: 0,
3024 warning_count: 0,
3025 }
3026 );
3027 });
3028}
3029
3030#[gpui::test]
3031async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
3032 init_test(cx);
3033
3034 let fs = FakeFs::new(cx.executor());
3035 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
3036
3037 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3038 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3039
3040 language_registry.add(rust_lang());
3041 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3042
3043 let (buffer, _handle) = project
3044 .update(cx, |project, cx| {
3045 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3046 })
3047 .await
3048 .unwrap();
3049
3050 // Before restarting the server, report diagnostics with an unknown buffer version.
3051 let fake_server = fake_servers.next().await.unwrap();
3052 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3053 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3054 version: Some(10000),
3055 diagnostics: Vec::new(),
3056 });
3057 cx.executor().run_until_parked();
3058 project.update(cx, |project, cx| {
3059 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3060 });
3061
3062 let mut fake_server = fake_servers.next().await.unwrap();
3063 let notification = fake_server
3064 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3065 .await
3066 .text_document;
3067 assert_eq!(notification.version, 0);
3068}
3069
#[gpui::test]
// Verifies that cancelling language-server work for a buffer sends a
// WorkDoneProgressCancel only for the progress marked `cancellable: true`,
// not for the non-cancellable one.
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First progress: explicitly NOT cancellable; it must not receive a cancel.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second progress: cancellable; this is the one that should be cancelled.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Exactly the cancellable token should come back in the cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3141
#[gpui::test]
// Verifies that toggling `enable_language_server` per language stops and
// restarts only the affected server, leaving the other language's server alone.
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The disabled server is told to exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance re-opens the Rust buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JS server is shut down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
3259
3260#[gpui::test(iterations = 3)]
3261async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3262 init_test(cx);
3263
3264 let text = "
3265 fn a() { A }
3266 fn b() { BB }
3267 fn c() { CCC }
3268 "
3269 .unindent();
3270
3271 let fs = FakeFs::new(cx.executor());
3272 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3273
3274 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3275 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3276
3277 language_registry.add(rust_lang());
3278 let mut fake_servers = language_registry.register_fake_lsp(
3279 "Rust",
3280 FakeLspAdapter {
3281 disk_based_diagnostics_sources: vec!["disk".into()],
3282 ..Default::default()
3283 },
3284 );
3285
3286 let buffer = project
3287 .update(cx, |project, cx| {
3288 project.open_local_buffer(path!("/dir/a.rs"), cx)
3289 })
3290 .await
3291 .unwrap();
3292
3293 let _handle = project.update(cx, |project, cx| {
3294 project.register_buffer_with_language_servers(&buffer, cx)
3295 });
3296
3297 let mut fake_server = fake_servers.next().await.unwrap();
3298 let open_notification = fake_server
3299 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3300 .await;
3301
3302 // Edit the buffer, moving the content down
3303 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3304 let change_notification_1 = fake_server
3305 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3306 .await;
3307 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3308
3309 // Report some diagnostics for the initial version of the buffer
3310 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3311 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3312 version: Some(open_notification.text_document.version),
3313 diagnostics: vec![
3314 lsp::Diagnostic {
3315 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3316 severity: Some(DiagnosticSeverity::ERROR),
3317 message: "undefined variable 'A'".to_string(),
3318 source: Some("disk".to_string()),
3319 ..Default::default()
3320 },
3321 lsp::Diagnostic {
3322 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3323 severity: Some(DiagnosticSeverity::ERROR),
3324 message: "undefined variable 'BB'".to_string(),
3325 source: Some("disk".to_string()),
3326 ..Default::default()
3327 },
3328 lsp::Diagnostic {
3329 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3330 severity: Some(DiagnosticSeverity::ERROR),
3331 source: Some("disk".to_string()),
3332 message: "undefined variable 'CCC'".to_string(),
3333 ..Default::default()
3334 },
3335 ],
3336 });
3337
3338 // The diagnostics have moved down since they were created.
3339 cx.executor().run_until_parked();
3340 buffer.update(cx, |buffer, _| {
3341 assert_eq!(
3342 buffer
3343 .snapshot()
3344 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3345 .collect::<Vec<_>>(),
3346 &[
3347 DiagnosticEntry {
3348 range: Point::new(3, 9)..Point::new(3, 11),
3349 diagnostic: Diagnostic {
3350 source: Some("disk".into()),
3351 severity: DiagnosticSeverity::ERROR,
3352 message: "undefined variable 'BB'".to_string(),
3353 is_disk_based: true,
3354 group_id: 1,
3355 is_primary: true,
3356 source_kind: DiagnosticSourceKind::Pushed,
3357 ..Diagnostic::default()
3358 },
3359 },
3360 DiagnosticEntry {
3361 range: Point::new(4, 9)..Point::new(4, 12),
3362 diagnostic: Diagnostic {
3363 source: Some("disk".into()),
3364 severity: DiagnosticSeverity::ERROR,
3365 message: "undefined variable 'CCC'".to_string(),
3366 is_disk_based: true,
3367 group_id: 2,
3368 is_primary: true,
3369 source_kind: DiagnosticSourceKind::Pushed,
3370 ..Diagnostic::default()
3371 }
3372 }
3373 ]
3374 );
3375 assert_eq!(
3376 chunks_with_diagnostics(buffer, 0..buffer.len()),
3377 [
3378 ("\n\nfn a() { ".to_string(), None),
3379 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3380 (" }\nfn b() { ".to_string(), None),
3381 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3382 (" }\nfn c() { ".to_string(), None),
3383 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3384 (" }\n".to_string(), None),
3385 ]
3386 );
3387 assert_eq!(
3388 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3389 [
3390 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3391 (" }\nfn c() { ".to_string(), None),
3392 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3393 ]
3394 );
3395 });
3396
3397 // Ensure overlapping diagnostics are highlighted correctly.
3398 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3399 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3400 version: Some(open_notification.text_document.version),
3401 diagnostics: vec![
3402 lsp::Diagnostic {
3403 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3404 severity: Some(DiagnosticSeverity::ERROR),
3405 message: "undefined variable 'A'".to_string(),
3406 source: Some("disk".to_string()),
3407 ..Default::default()
3408 },
3409 lsp::Diagnostic {
3410 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3411 severity: Some(DiagnosticSeverity::WARNING),
3412 message: "unreachable statement".to_string(),
3413 source: Some("disk".to_string()),
3414 ..Default::default()
3415 },
3416 ],
3417 });
3418
3419 cx.executor().run_until_parked();
3420 buffer.update(cx, |buffer, _| {
3421 assert_eq!(
3422 buffer
3423 .snapshot()
3424 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3425 .collect::<Vec<_>>(),
3426 &[
3427 DiagnosticEntry {
3428 range: Point::new(2, 9)..Point::new(2, 12),
3429 diagnostic: Diagnostic {
3430 source: Some("disk".into()),
3431 severity: DiagnosticSeverity::WARNING,
3432 message: "unreachable statement".to_string(),
3433 is_disk_based: true,
3434 group_id: 4,
3435 is_primary: true,
3436 source_kind: DiagnosticSourceKind::Pushed,
3437 ..Diagnostic::default()
3438 }
3439 },
3440 DiagnosticEntry {
3441 range: Point::new(2, 9)..Point::new(2, 10),
3442 diagnostic: Diagnostic {
3443 source: Some("disk".into()),
3444 severity: DiagnosticSeverity::ERROR,
3445 message: "undefined variable 'A'".to_string(),
3446 is_disk_based: true,
3447 group_id: 3,
3448 is_primary: true,
3449 source_kind: DiagnosticSourceKind::Pushed,
3450 ..Diagnostic::default()
3451 },
3452 }
3453 ]
3454 );
3455 assert_eq!(
3456 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3457 [
3458 ("fn a() { ".to_string(), None),
3459 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3460 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3461 ("\n".to_string(), None),
3462 ]
3463 );
3464 assert_eq!(
3465 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3466 [
3467 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3468 ("\n".to_string(), None),
3469 ]
3470 );
3471 });
3472
3473 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3474 // changes since the last save.
3475 buffer.update(cx, |buffer, cx| {
3476 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3477 buffer.edit(
3478 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3479 None,
3480 cx,
3481 );
3482 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3483 });
3484 let change_notification_2 = fake_server
3485 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3486 .await;
3487 assert!(
3488 change_notification_2.text_document.version > change_notification_1.text_document.version
3489 );
3490
3491 // Handle out-of-order diagnostics
3492 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3493 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3494 version: Some(change_notification_2.text_document.version),
3495 diagnostics: vec![
3496 lsp::Diagnostic {
3497 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3498 severity: Some(DiagnosticSeverity::ERROR),
3499 message: "undefined variable 'BB'".to_string(),
3500 source: Some("disk".to_string()),
3501 ..Default::default()
3502 },
3503 lsp::Diagnostic {
3504 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3505 severity: Some(DiagnosticSeverity::WARNING),
3506 message: "undefined variable 'A'".to_string(),
3507 source: Some("disk".to_string()),
3508 ..Default::default()
3509 },
3510 ],
3511 });
3512
3513 cx.executor().run_until_parked();
3514 buffer.update(cx, |buffer, _| {
3515 assert_eq!(
3516 buffer
3517 .snapshot()
3518 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3519 .collect::<Vec<_>>(),
3520 &[
3521 DiagnosticEntry {
3522 range: Point::new(2, 21)..Point::new(2, 22),
3523 diagnostic: Diagnostic {
3524 source: Some("disk".into()),
3525 severity: DiagnosticSeverity::WARNING,
3526 message: "undefined variable 'A'".to_string(),
3527 is_disk_based: true,
3528 group_id: 6,
3529 is_primary: true,
3530 source_kind: DiagnosticSourceKind::Pushed,
3531 ..Diagnostic::default()
3532 }
3533 },
3534 DiagnosticEntry {
3535 range: Point::new(3, 9)..Point::new(3, 14),
3536 diagnostic: Diagnostic {
3537 source: Some("disk".into()),
3538 severity: DiagnosticSeverity::ERROR,
3539 message: "undefined variable 'BB'".to_string(),
3540 is_disk_based: true,
3541 group_id: 5,
3542 is_primary: true,
3543 source_kind: DiagnosticSourceKind::Pushed,
3544 ..Diagnostic::default()
3545 },
3546 }
3547 ]
3548 );
3549 });
3550}
3551
// Verifies rendering of zero-width diagnostic ranges: an empty range must be
// widened to cover an adjacent character so the diagnostic is still visible
// when the buffer is chunked with highlights.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two lines that each receive an empty-range diagnostic at column 10,
    // plus a healthy third line that must remain unhighlighted.
    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Push the diagnostics directly into the LSP store rather than going
    // through a fake language server; both ranges are empty (start == end).
    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from(path!("/dir/a.rs")),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
3629
// Verifies that diagnostics reported by two distinct language servers for the
// same file are kept separate and both contribute to the project-wide
// diagnostic summary (two errors, not one overwriting the other).
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    lsp_store.update(cx, |lsp_store, cx| {
        // First server (id 0) reports an error over the same range...
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // ...as does a second server (id 1); the entries must not collide.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors are counted in the summary.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
3690
// Verifies that deleting a file from disk clears that file's contribution to
// the diagnostic summary, while diagnostics for other files are preserved.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_worktree_entry_removal(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one", "b.rs": "two" }))
        .await;

    let project = Project::test(fs.clone(), [Path::new(path!("/dir"))], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    lsp_store.update(cx, |lsp_store, cx| {
        // One error in a.rs and one warning in b.rs, both from the same server.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "error in a".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new(path!("/dir/b.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        is_primary: true,
                        message: "warning in b".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 1,
            }
        );
    });

    // Remove a.rs from disk and let the worktree observe the FS event.
    fs.remove_file(path!("/dir/a.rs").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Only a.rs's error is gone; b.rs's warning survives.
    lsp_store.update(cx, |lsp_store, cx| {
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 1,
            },
        );
    });
}
3768
// Verifies that restarting a language server clears its previously published
// diagnostics and emits a DiagnosticsUpdated event so the UI can refresh.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_server_restart(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one error from the (pre-restart) server instance.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 1)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error before restart".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Start observing project events before triggering the restart so the
    // DiagnosticsUpdated emission cannot be missed.
    let mut events = cx.events(&project);

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });
    cx.executor().run_until_parked();

    // Drain all events that are already queued (now_or_never: non-blocking)
    // and check that at least one DiagnosticsUpdated was among them.
    let mut received_diagnostics_updated = false;
    while let Some(Some(event)) =
        futures::FutureExt::now_or_never(futures::StreamExt::next(&mut events))
    {
        if matches!(event, Event::DiagnosticsUpdated { .. }) {
            received_diagnostics_updated = true;
        }
    }
    assert!(
        received_diagnostics_updated,
        "DiagnosticsUpdated event should be emitted when a language server is stopped"
    );

    // The restarted server has not re-published anything, so the summary is empty.
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
3842
// Verifies that reloading a buffer (after its file changes on disk) triggers a
// fresh document-diagnostics pull from a server that advertises pull-based
// diagnostics. The pull count is tracked via an atomic shared with the fake
// server's request handler.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_buffer_reload(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Counts how many DocumentDiagnosticRequest pulls the fake server serves.
    let pull_count = Arc::new(atomic::AtomicUsize::new(0));
    let closure_pull_count = pull_count.clone();
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Advertise pull-diagnostics support so the client issues pulls.
            capabilities: lsp::ServerCapabilities {
                diagnostic_provider: Some(lsp::DiagnosticServerCapabilities::Options(
                    lsp::DiagnosticOptions {
                        identifier: Some("test-reload".to_string()),
                        inter_file_dependencies: true,
                        workspace_diagnostics: false,
                        work_done_progress_options: Default::default(),
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            // Every pull increments the counter and returns an empty full report.
            initializer: Some(Box::new(move |fake_server| {
                let pull_count = closure_pull_count.clone();
                fake_server.set_request_handler::<lsp::request::DocumentDiagnosticRequest, _, _>(
                    move |_, _| {
                        let pull_count = pull_count.clone();
                        async move {
                            pull_count.fetch_add(1, atomic::Ordering::SeqCst);
                            Ok(lsp::DocumentDiagnosticReportResult::Report(
                                lsp::DocumentDiagnosticReport::Full(
                                    lsp::RelatedFullDocumentDiagnosticReport {
                                        related_documents: None,
                                        full_document_diagnostic_report:
                                            lsp::FullDocumentDiagnosticReport {
                                                result_id: None,
                                                items: Vec::new(),
                                            },
                                    },
                                ),
                            ))
                        }
                    },
                );
            })),
            ..FakeLspAdapter::default()
        },
    );

    let (_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Publish initial diagnostics via the fake server.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 3)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error in a".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Snapshot the pull count so only post-reload pulls are measured.
    let pulls_before = pull_count.load(atomic::Ordering::SeqCst);

    // Change the file on disk. The FS event triggers buffer reload,
    // which in turn triggers pull_diagnostics_for_buffer.
    fs.save(
        path!("/dir/a.rs").as_ref(),
        &"fixed content".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    let pulls_after = pull_count.load(atomic::Ordering::SeqCst);
    assert!(
        pulls_after > pulls_before,
        "Expected document diagnostic pull after buffer reload (before={pulls_before}, after={pulls_after})"
    );
}
3950
// Verifies that LSP edits computed against an OLD document version are
// correctly translated through subsequent local edits: edits_from_lsp is given
// the version from DidOpen, while the buffer has since been edited three
// times. The resulting ranges must land where the original text moved to.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server believes the document is at; the LSP
    // edits below will be expressed against this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Translate edits expressed in the old version's coordinates; positions
    // here refer to the pre-edit text above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must produce the merged result of both
    // the local edits and the server's edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
4105
// Verifies that edits_from_lsp collapses a sprawling server diff (rewrite-
// most-of-the-file, as rust-analyzer emits for merge-imports) into the
// minimal equivalent set of buffer edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four server edits were reduced to just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4216
// Verifies that edits_from_lsp tolerates a spec-violating edit order: an
// insertion at a position that an earlier replacement already covers. Both
// edits must still apply (insertion lands before the replaced text).
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
4272
// Verifies that edits_from_lsp normalizes malformed server edits: out-of-order
// edits, an inverted range (start after end), and a range that extends past
// the end of the document. The result must still be the minimal valid diff.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start column 8 comes after end column 4.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range ends on line 99, far past the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the translated edits are minimal and ordered.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4379
4380fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4381 buffer: &Buffer,
4382 range: Range<T>,
4383) -> Vec<(String, Option<DiagnosticSeverity>)> {
4384 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4385 for chunk in buffer.snapshot().chunks(range, true) {
4386 if chunks
4387 .last()
4388 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4389 {
4390 chunks.last_mut().unwrap().0.push_str(chunk.text);
4391 } else {
4392 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4393 }
4394 }
4395 chunks
4396}
4397
// Verifies go-to-definition into a file outside the project's visible
// worktrees: the target file is opened in a temporary invisible worktree,
// which is dropped (and removed from the project) once the last reference to
// the definition is released.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside every worktree.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server resolves the definition to a location inside a.rs.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs now appears as an invisible worktree alongside the visible b.rs.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition released the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree in the project as (absolute path, is_visible).
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4498
4499#[gpui::test]
4500async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
4501 init_test(cx);
4502
4503 let fs = FakeFs::new(cx.executor());
4504 fs.insert_tree(
4505 path!("/dir"),
4506 json!({
4507 "a.ts": "",
4508 }),
4509 )
4510 .await;
4511
4512 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4513
4514 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4515 language_registry.add(typescript_lang());
4516 let mut fake_language_servers = language_registry.register_fake_lsp(
4517 "TypeScript",
4518 FakeLspAdapter {
4519 capabilities: lsp::ServerCapabilities {
4520 completion_provider: Some(lsp::CompletionOptions {
4521 trigger_characters: Some(vec![".".to_string()]),
4522 ..Default::default()
4523 }),
4524 ..Default::default()
4525 },
4526 ..Default::default()
4527 },
4528 );
4529
4530 let (buffer, _handle) = project
4531 .update(cx, |p, cx| {
4532 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
4533 })
4534 .await
4535 .unwrap();
4536
4537 let fake_server = fake_language_servers.next().await.unwrap();
4538 cx.executor().run_until_parked();
4539
4540 // When text_edit exists, it takes precedence over insert_text and label
4541 let text = "let a = obj.fqn";
4542 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
4543 let completions = project.update(cx, |project, cx| {
4544 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
4545 });
4546
4547 fake_server
4548 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
4549 Ok(Some(lsp::CompletionResponse::Array(vec![
4550 lsp::CompletionItem {
4551 label: "labelText".into(),
4552 insert_text: Some("insertText".into()),
4553 text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
4554 range: lsp::Range::new(
4555 lsp::Position::new(0, text.len() as u32 - 3),
4556 lsp::Position::new(0, text.len() as u32),
4557 ),
4558 new_text: "textEditText".into(),
4559 })),
4560 ..Default::default()
4561 },
4562 ])))
4563 })
4564 .next()
4565 .await;
4566
4567 let completions = completions
4568 .await
4569 .unwrap()
4570 .into_iter()
4571 .flat_map(|response| response.completions)
4572 .collect::<Vec<_>>();
4573 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
4574
4575 assert_eq!(completions.len(), 1);
4576 assert_eq!(completions[0].new_text, "textEditText");
4577 assert_eq!(
4578 completions[0].replace_range.to_offset(&snapshot),
4579 text.len() - 3..text.len()
4580 );
4581}
4582
// Covers how completions derive their `new_text` and replacement range when
// the server attaches no per-item `text_edit` and instead supplies a default
// `edit_range` via `CompletionList::item_defaults`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Request completions before installing the handler; the fake server
        // queues the request until the handler below consumes it.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covering the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` is applied over the default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit range present, the label — not `insert_text` —
        // supplies the replacement text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4720
// Covers completions when the server provides neither per-item `text_edit`s
// nor a default `edit_range`: the replacement range must be inferred from the
// text surrounding the completion position.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the request first; the fake server queues it until the handler
    // installed below consumes it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // `insert_text` wins over the label, and the inferred range covers the
    // trailing "fqn" before the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Note the completion position is just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is used as the text, and the inferred range covers "cmp"
    // inside the string literal, excluding the closing quote.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4827
4828#[gpui::test]
4829async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
4830 init_test(cx);
4831
4832 let fs = FakeFs::new(cx.executor());
4833 fs.insert_tree(
4834 path!("/dir"),
4835 json!({
4836 "a.ts": "",
4837 }),
4838 )
4839 .await;
4840
4841 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
4842
4843 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4844 language_registry.add(typescript_lang());
4845 let mut fake_language_servers = language_registry.register_fake_lsp(
4846 "TypeScript",
4847 FakeLspAdapter {
4848 capabilities: lsp::ServerCapabilities {
4849 completion_provider: Some(lsp::CompletionOptions {
4850 trigger_characters: Some(vec![":".to_string()]),
4851 ..Default::default()
4852 }),
4853 ..Default::default()
4854 },
4855 ..Default::default()
4856 },
4857 );
4858
4859 let (buffer, _handle) = project
4860 .update(cx, |p, cx| {
4861 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
4862 })
4863 .await
4864 .unwrap();
4865
4866 let fake_server = fake_language_servers.next().await.unwrap();
4867 cx.executor().run_until_parked();
4868
4869 let text = "let a = b.fqn";
4870 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
4871 let completions = project.update(cx, |project, cx| {
4872 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
4873 });
4874
4875 fake_server
4876 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
4877 Ok(Some(lsp::CompletionResponse::Array(vec![
4878 lsp::CompletionItem {
4879 label: "fullyQualifiedName?".into(),
4880 insert_text: Some("fully\rQualified\r\nName".into()),
4881 ..Default::default()
4882 },
4883 ])))
4884 })
4885 .next()
4886 .await;
4887 let completions = completions
4888 .await
4889 .unwrap()
4890 .into_iter()
4891 .flat_map(|response| response.completions)
4892 .collect::<Vec<_>>();
4893 assert_eq!(completions.len(), 1);
4894 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
4895}
4896
// A code action whose resolution yields only a command (no edits) must be
// applied by executing the command; the edits the server pushes back via
// `workspace/applyEdit` while the command runs become the resulting
// project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The server must advertise both lazy code-action resolution and the
    // command that the action will eventually run.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                // Only the first action carries `data`; the resolve handler
                // below turns that payload into a command.
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action — the one that resolves to a command.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request back to the client: insert "X"
                    // at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        // The edit is undoable as part of the transaction.
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
5040
// Renaming a file into a not-yet-existing directory hierarchy must create the
// intermediate directories and preserve contents; a second rename into an
// already-existing directory must also succeed.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    // Look up the worktree and the entry id of the file we are about to move.
    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // First rename: target directories dir1/dir2/dir3 do not exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // The entry id changed with the move; fetch it again for the second rename.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Second rename: move up into dir1/dir2, which already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
5148
5149#[gpui::test(iterations = 10)]
5150async fn test_save_file(cx: &mut gpui::TestAppContext) {
5151 init_test(cx);
5152
5153 let fs = FakeFs::new(cx.executor());
5154 fs.insert_tree(
5155 path!("/dir"),
5156 json!({
5157 "file1": "the old contents",
5158 }),
5159 )
5160 .await;
5161
5162 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5163 let buffer = project
5164 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5165 .await
5166 .unwrap();
5167 buffer.update(cx, |buffer, cx| {
5168 assert_eq!(buffer.text(), "the old contents");
5169 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5170 });
5171
5172 project
5173 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5174 .await
5175 .unwrap();
5176
5177 let new_text = fs
5178 .load(Path::new(path!("/dir/file1")))
5179 .await
5180 .unwrap()
5181 .replace("\r\n", "\n");
5182 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5183}
5184
// Saving an untitled buffer under a name with a recognized extension must
// start the matching language server and register the buffer with it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer has no file, hence no language — so registering it
    // with language servers must be a no-op at this point.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving under a `.rs` name gives the buffer a Rust file.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the freshly-started server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
5264
5265#[gpui::test(iterations = 30)]
5266async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
5267 init_test(cx);
5268
5269 let fs = FakeFs::new(cx.executor());
5270 fs.insert_tree(
5271 path!("/dir"),
5272 json!({
5273 "file1": "the original contents",
5274 }),
5275 )
5276 .await;
5277
5278 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5279 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5280 let buffer = project
5281 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5282 .await
5283 .unwrap();
5284
5285 // Change the buffer's file on disk, and then wait for the file change
5286 // to be detected by the worktree, so that the buffer starts reloading.
5287 fs.save(
5288 path!("/dir/file1").as_ref(),
5289 &"the first contents".into(),
5290 Default::default(),
5291 )
5292 .await
5293 .unwrap();
5294 worktree.next_event(cx).await;
5295
5296 // Change the buffer's file again. Depending on the random seed, the
5297 // previous file change may still be in progress.
5298 fs.save(
5299 path!("/dir/file1").as_ref(),
5300 &"the second contents".into(),
5301 Default::default(),
5302 )
5303 .await
5304 .unwrap();
5305 worktree.next_event(cx).await;
5306
5307 cx.executor().run_until_parked();
5308 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5309 buffer.read_with(cx, |buffer, _| {
5310 assert_eq!(buffer.text(), on_disk_text);
5311 assert!(!buffer.is_dirty(), "buffer should not be dirty");
5312 assert!(!buffer.has_conflict(), "buffer should not be dirty");
5313 });
5314}
5315
5316#[gpui::test(iterations = 30)]
5317async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
5318 init_test(cx);
5319
5320 let fs = FakeFs::new(cx.executor());
5321 fs.insert_tree(
5322 path!("/dir"),
5323 json!({
5324 "file1": "the original contents",
5325 }),
5326 )
5327 .await;
5328
5329 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5330 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5331 let buffer = project
5332 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5333 .await
5334 .unwrap();
5335
5336 // Change the buffer's file on disk, and then wait for the file change
5337 // to be detected by the worktree, so that the buffer starts reloading.
5338 fs.save(
5339 path!("/dir/file1").as_ref(),
5340 &"the first contents".into(),
5341 Default::default(),
5342 )
5343 .await
5344 .unwrap();
5345 worktree.next_event(cx).await;
5346
5347 cx.executor()
5348 .spawn(cx.executor().simulate_random_delay())
5349 .await;
5350
5351 // Perform a noop edit, causing the buffer's version to increase.
5352 buffer.update(cx, |buffer, cx| {
5353 buffer.edit([(0..0, " ")], None, cx);
5354 buffer.undo(cx);
5355 });
5356
5357 cx.executor().run_until_parked();
5358 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5359 buffer.read_with(cx, |buffer, _| {
5360 let buffer_text = buffer.text();
5361 if buffer_text == on_disk_text {
5362 assert!(
5363 !buffer.is_dirty() && !buffer.has_conflict(),
5364 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5365 );
5366 }
5367 // If the file change occurred while the buffer was processing the first
5368 // change, the buffer will be in a conflicting state.
5369 else {
5370 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5371 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5372 }
5373 });
5374}
5375
5376#[gpui::test]
5377async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5378 init_test(cx);
5379
5380 let fs = FakeFs::new(cx.executor());
5381 fs.insert_tree(
5382 path!("/dir"),
5383 json!({
5384 "file1": "the old contents",
5385 }),
5386 )
5387 .await;
5388
5389 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5390 let buffer = project
5391 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5392 .await
5393 .unwrap();
5394 buffer.update(cx, |buffer, cx| {
5395 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5396 });
5397
5398 project
5399 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5400 .await
5401 .unwrap();
5402
5403 let new_text = fs
5404 .load(Path::new(path!("/dir/file1")))
5405 .await
5406 .unwrap()
5407 .replace("\r\n", "\n");
5408 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5409}
5410
5411#[gpui::test]
5412async fn test_save_as(cx: &mut gpui::TestAppContext) {
5413 init_test(cx);
5414
5415 let fs = FakeFs::new(cx.executor());
5416 fs.insert_tree("/dir", json!({})).await;
5417
5418 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5419
5420 let languages = project.update(cx, |project, _| project.languages().clone());
5421 languages.add(rust_lang());
5422
5423 let buffer = project.update(cx, |project, cx| {
5424 project.create_local_buffer("", None, false, cx)
5425 });
5426 buffer.update(cx, |buffer, cx| {
5427 buffer.edit([(0..0, "abc")], None, cx);
5428 assert!(buffer.is_dirty());
5429 assert!(!buffer.has_conflict());
5430 assert_eq!(buffer.language().unwrap().name(), "Plain Text");
5431 });
5432 project
5433 .update(cx, |project, cx| {
5434 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5435 let path = ProjectPath {
5436 worktree_id,
5437 path: rel_path("file1.rs").into(),
5438 };
5439 project.save_buffer_as(buffer.clone(), path, cx)
5440 })
5441 .await
5442 .unwrap();
5443 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
5444
5445 cx.executor().run_until_parked();
5446 buffer.update(cx, |buffer, cx| {
5447 assert_eq!(
5448 buffer.file().unwrap().full_path(cx),
5449 Path::new("dir/file1.rs")
5450 );
5451 assert!(!buffer.is_dirty());
5452 assert!(!buffer.has_conflict());
5453 assert_eq!(buffer.language().unwrap().name(), "Rust");
5454 });
5455
5456 let opened_buffer = project
5457 .update(cx, |project, cx| {
5458 project.open_local_buffer("/dir/file1.rs", cx)
5459 })
5460 .await
5461 .unwrap();
5462 assert_eq!(opened_buffer, buffer);
5463}
5464
// "Save as" from an already-file-backed buffer: the buffer re-associates with
// the new path, and the original file on disk remains untouched.
#[gpui::test]
async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    fs.insert_tree(
        path!("/dir"),
        json!({
            "data_a.txt": "data about a"
        }),
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    // Edit "data about a" -> "data about b" in the buffer only.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(11..12, "b")], None, cx);
    });

    // Save buffer's contents as a new file and confirm that the buffer's now
    // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
    // file associated with the buffer has now been updated to `data_b.txt`
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let new_path = ProjectPath {
                worktree_id,
                path: rel_path("data_b.txt").into(),
            };

            project.save_buffer_as(buffer.clone(), new_path, cx)
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_b.txt")
        )
    });

    // Open the original `data_a.txt` file, confirming that its contents are
    // unchanged and the resulting buffer's associated file is `data_a.txt`.
    let original_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    original_buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "data about a");
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_a.txt")
        )
    });
}
5531
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Uses the real filesystem: renames/deletes files and directories on
    // disk, then checks that (a) worktree entries keep stable ids across
    // renames, (b) open buffers track their files' new paths and disk state,
    // and (c) a remote worktree replica converges to the same state once the
    // recorded local updates are replayed into it.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp-tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Returns the worktree entry id for a relative path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can be replayed
    // against the remote replica at the end of the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    // All buffers start out clean.
    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the new on-disk layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids are preserved across renames, including the directory move.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Buffers whose files were renamed now report the new paths; the
        // deleted file's buffer keeps its last known path but reports
        // `DiskState::Deleted`.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5699
5700#[cfg(target_os = "linux")]
5701#[gpui::test(retries = 5)]
5702async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
5703 init_test(cx);
5704 cx.executor().allow_parking();
5705
5706 let dir = TempTree::new(json!({}));
5707 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
5708 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5709
5710 tree.flush_fs_events(cx).await;
5711
5712 let repro_dir = dir.path().join("repro");
5713 std::fs::create_dir(&repro_dir).unwrap();
5714 tree.flush_fs_events(cx).await;
5715
5716 cx.update(|cx| {
5717 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5718 });
5719
5720 std::fs::remove_dir_all(&repro_dir).unwrap();
5721 tree.flush_fs_events(cx).await;
5722
5723 cx.update(|cx| {
5724 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
5725 });
5726
5727 std::fs::create_dir(&repro_dir).unwrap();
5728 tree.flush_fs_events(cx).await;
5729
5730 cx.update(|cx| {
5731 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5732 });
5733
5734 std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
5735 tree.flush_fs_events(cx).await;
5736
5737 cx.update(|cx| {
5738 assert!(
5739 tree.read(cx)
5740 .entry_for_path(rel_path("repro/repro-marker"))
5741 .is_some()
5742 );
5743 });
5744}
5745
5746#[gpui::test(iterations = 10)]
5747async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
5748 init_test(cx);
5749
5750 let fs = FakeFs::new(cx.executor());
5751 fs.insert_tree(
5752 path!("/dir"),
5753 json!({
5754 "a": {
5755 "file1": "",
5756 }
5757 }),
5758 )
5759 .await;
5760
5761 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
5762 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5763 let tree_id = tree.update(cx, |tree, _| tree.id());
5764
5765 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5766 project.update(cx, |project, cx| {
5767 let tree = project.worktrees(cx).next().unwrap();
5768 tree.read(cx)
5769 .entry_for_path(rel_path(path))
5770 .unwrap_or_else(|| panic!("no entry for path {}", path))
5771 .id
5772 })
5773 };
5774
5775 let dir_id = id_for_path("a", cx);
5776 let file_id = id_for_path("a/file1", cx);
5777 let buffer = project
5778 .update(cx, |p, cx| {
5779 p.open_buffer((tree_id, rel_path("a/file1")), cx)
5780 })
5781 .await
5782 .unwrap();
5783 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5784
5785 project
5786 .update(cx, |project, cx| {
5787 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
5788 })
5789 .unwrap()
5790 .await
5791 .into_included()
5792 .unwrap();
5793 cx.executor().run_until_parked();
5794
5795 assert_eq!(id_for_path("b", cx), dir_id);
5796 assert_eq!(id_for_path("b/file1", cx), file_id);
5797 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5798}
5799
5800#[gpui::test]
5801async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5802 init_test(cx);
5803
5804 let fs = FakeFs::new(cx.executor());
5805 fs.insert_tree(
5806 "/dir",
5807 json!({
5808 "a.txt": "a-contents",
5809 "b.txt": "b-contents",
5810 }),
5811 )
5812 .await;
5813
5814 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5815
5816 // Spawn multiple tasks to open paths, repeating some paths.
5817 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5818 (
5819 p.open_local_buffer("/dir/a.txt", cx),
5820 p.open_local_buffer("/dir/b.txt", cx),
5821 p.open_local_buffer("/dir/a.txt", cx),
5822 )
5823 });
5824
5825 let buffer_a_1 = buffer_a_1.await.unwrap();
5826 let buffer_a_2 = buffer_a_2.await.unwrap();
5827 let buffer_b = buffer_b.await.unwrap();
5828 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5829 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5830
5831 // There is only one buffer per path.
5832 let buffer_a_id = buffer_a_1.entity_id();
5833 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5834
5835 // Open the same path again while it is still open.
5836 drop(buffer_a_1);
5837 let buffer_a_3 = project
5838 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5839 .await
5840 .unwrap();
5841
5842 // There's still only one buffer per path.
5843 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5844}
5845
5846#[gpui::test]
5847async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5848 init_test(cx);
5849
5850 let fs = FakeFs::new(cx.executor());
5851 fs.insert_tree(
5852 path!("/dir"),
5853 json!({
5854 "file1": "abc",
5855 "file2": "def",
5856 "file3": "ghi",
5857 }),
5858 )
5859 .await;
5860
5861 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5862
5863 let buffer1 = project
5864 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5865 .await
5866 .unwrap();
5867 let events = Arc::new(Mutex::new(Vec::new()));
5868
5869 // initially, the buffer isn't dirty.
5870 buffer1.update(cx, |buffer, cx| {
5871 cx.subscribe(&buffer1, {
5872 let events = events.clone();
5873 move |_, _, event, _| match event {
5874 BufferEvent::Operation { .. } => {}
5875 _ => events.lock().push(event.clone()),
5876 }
5877 })
5878 .detach();
5879
5880 assert!(!buffer.is_dirty());
5881 assert!(events.lock().is_empty());
5882
5883 buffer.edit([(1..2, "")], None, cx);
5884 });
5885
5886 // after the first edit, the buffer is dirty, and emits a dirtied event.
5887 buffer1.update(cx, |buffer, cx| {
5888 assert!(buffer.text() == "ac");
5889 assert!(buffer.is_dirty());
5890 assert_eq!(
5891 *events.lock(),
5892 &[
5893 language::BufferEvent::Edited { is_local: true },
5894 language::BufferEvent::DirtyChanged
5895 ]
5896 );
5897 events.lock().clear();
5898 buffer.did_save(
5899 buffer.version(),
5900 buffer.file().unwrap().disk_state().mtime(),
5901 cx,
5902 );
5903 });
5904
5905 // after saving, the buffer is not dirty, and emits a saved event.
5906 buffer1.update(cx, |buffer, cx| {
5907 assert!(!buffer.is_dirty());
5908 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
5909 events.lock().clear();
5910
5911 buffer.edit([(1..1, "B")], None, cx);
5912 buffer.edit([(2..2, "D")], None, cx);
5913 });
5914
5915 // after editing again, the buffer is dirty, and emits another dirty event.
5916 buffer1.update(cx, |buffer, cx| {
5917 assert!(buffer.text() == "aBDc");
5918 assert!(buffer.is_dirty());
5919 assert_eq!(
5920 *events.lock(),
5921 &[
5922 language::BufferEvent::Edited { is_local: true },
5923 language::BufferEvent::DirtyChanged,
5924 language::BufferEvent::Edited { is_local: true },
5925 ],
5926 );
5927 events.lock().clear();
5928
5929 // After restoring the buffer to its previously-saved state,
5930 // the buffer is not considered dirty anymore.
5931 buffer.edit([(1..3, "")], None, cx);
5932 assert!(buffer.text() == "ac");
5933 assert!(!buffer.is_dirty());
5934 });
5935
5936 assert_eq!(
5937 *events.lock(),
5938 &[
5939 language::BufferEvent::Edited { is_local: true },
5940 language::BufferEvent::DirtyChanged
5941 ]
5942 );
5943
5944 // When a file is deleted, it is not considered dirty.
5945 let events = Arc::new(Mutex::new(Vec::new()));
5946 let buffer2 = project
5947 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5948 .await
5949 .unwrap();
5950 buffer2.update(cx, |_, cx| {
5951 cx.subscribe(&buffer2, {
5952 let events = events.clone();
5953 move |_, _, event, _| match event {
5954 BufferEvent::Operation { .. } => {}
5955 _ => events.lock().push(event.clone()),
5956 }
5957 })
5958 .detach();
5959 });
5960
5961 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
5962 .await
5963 .unwrap();
5964 cx.executor().run_until_parked();
5965 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5966 assert_eq!(
5967 mem::take(&mut *events.lock()),
5968 &[language::BufferEvent::FileHandleChanged]
5969 );
5970
5971 // Buffer becomes dirty when edited.
5972 buffer2.update(cx, |buffer, cx| {
5973 buffer.edit([(2..3, "")], None, cx);
5974 assert_eq!(buffer.is_dirty(), true);
5975 });
5976 assert_eq!(
5977 mem::take(&mut *events.lock()),
5978 &[
5979 language::BufferEvent::Edited { is_local: true },
5980 language::BufferEvent::DirtyChanged
5981 ]
5982 );
5983
5984 // Buffer becomes clean again when all of its content is removed, because
5985 // the file was deleted.
5986 buffer2.update(cx, |buffer, cx| {
5987 buffer.edit([(0..2, "")], None, cx);
5988 assert_eq!(buffer.is_empty(), true);
5989 assert_eq!(buffer.is_dirty(), false);
5990 });
5991 assert_eq!(
5992 *events.lock(),
5993 &[
5994 language::BufferEvent::Edited { is_local: true },
5995 language::BufferEvent::DirtyChanged
5996 ]
5997 );
5998
5999 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6000 let events = Arc::new(Mutex::new(Vec::new()));
6001 let buffer3 = project
6002 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
6003 .await
6004 .unwrap();
6005 buffer3.update(cx, |_, cx| {
6006 cx.subscribe(&buffer3, {
6007 let events = events.clone();
6008 move |_, _, event, _| match event {
6009 BufferEvent::Operation { .. } => {}
6010 _ => events.lock().push(event.clone()),
6011 }
6012 })
6013 .detach();
6014 });
6015
6016 buffer3.update(cx, |buffer, cx| {
6017 buffer.edit([(0..0, "x")], None, cx);
6018 });
6019 events.lock().clear();
6020 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
6021 .await
6022 .unwrap();
6023 cx.executor().run_until_parked();
6024 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
6025 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
6026}
6027
6028#[gpui::test]
6029async fn test_dirty_buffer_reloads_after_undo(cx: &mut gpui::TestAppContext) {
6030 init_test(cx);
6031
6032 let fs = FakeFs::new(cx.executor());
6033 fs.insert_tree(
6034 path!("/dir"),
6035 json!({
6036 "file.txt": "version 1",
6037 }),
6038 )
6039 .await;
6040
6041 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6042 let buffer = project
6043 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx))
6044 .await
6045 .unwrap();
6046
6047 buffer.read_with(cx, |buffer, _| {
6048 assert_eq!(buffer.text(), "version 1");
6049 assert!(!buffer.is_dirty());
6050 });
6051
6052 // User makes an edit, making the buffer dirty.
6053 buffer.update(cx, |buffer, cx| {
6054 buffer.edit([(0..0, "user edit: ")], None, cx);
6055 });
6056
6057 buffer.read_with(cx, |buffer, _| {
6058 assert!(buffer.is_dirty());
6059 assert_eq!(buffer.text(), "user edit: version 1");
6060 });
6061
6062 // External tool writes new content while buffer is dirty.
6063 // file_updated() updates the File but suppresses ReloadNeeded.
6064 fs.save(
6065 path!("/dir/file.txt").as_ref(),
6066 &"version 2 from external tool".into(),
6067 Default::default(),
6068 )
6069 .await
6070 .unwrap();
6071 cx.executor().run_until_parked();
6072
6073 buffer.read_with(cx, |buffer, _| {
6074 assert!(buffer.has_conflict());
6075 assert_eq!(buffer.text(), "user edit: version 1");
6076 });
6077
6078 // User undoes their edit. Buffer becomes clean, but disk has different
6079 // content. did_edit() detects the dirty->clean transition and checks if
6080 // disk changed while dirty. Since mtime differs from saved_mtime, it
6081 // emits ReloadNeeded.
6082 buffer.update(cx, |buffer, cx| {
6083 buffer.undo(cx);
6084 });
6085 cx.executor().run_until_parked();
6086
6087 buffer.read_with(cx, |buffer, _| {
6088 assert_eq!(
6089 buffer.text(),
6090 "version 2 from external tool",
6091 "buffer should reload from disk after undo makes it clean"
6092 );
6093 assert!(!buffer.is_dirty());
6094 });
6095}
6096
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // A clean buffer reloads when its file changes on disk, applying the
    // change as a minimal diff so that anchors survive in sensible
    // positions; a dirty buffer does not reload and is instead marked as
    // conflicted.
    init_test(cx);

    // The ˇ markers record offsets in the initial text; anchors are created
    // at these offsets so their positions can be checked after the reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create an anchor at each marked offset.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // The buffer starts out clean and conflict-free.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file's contents on disk. The markers in the new text
    // indicate where each anchor should land after the diff-based reload.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diff rather than being invalidated.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
6179
6180#[gpui::test]
6181async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
6182 init_test(cx);
6183
6184 let fs = FakeFs::new(cx.executor());
6185 fs.insert_tree(
6186 path!("/dir"),
6187 json!({
6188 "file1": "a\nb\nc\n",
6189 "file2": "one\r\ntwo\r\nthree\r\n",
6190 }),
6191 )
6192 .await;
6193
6194 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6195 let buffer1 = project
6196 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
6197 .await
6198 .unwrap();
6199 let buffer2 = project
6200 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
6201 .await
6202 .unwrap();
6203
6204 buffer1.update(cx, |buffer, _| {
6205 assert_eq!(buffer.text(), "a\nb\nc\n");
6206 assert_eq!(buffer.line_ending(), LineEnding::Unix);
6207 });
6208 buffer2.update(cx, |buffer, _| {
6209 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
6210 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6211 });
6212
6213 // Change a file's line endings on disk from unix to windows. The buffer's
6214 // state updates correctly.
6215 fs.save(
6216 path!("/dir/file1").as_ref(),
6217 &"aaa\nb\nc\n".into(),
6218 LineEnding::Windows,
6219 )
6220 .await
6221 .unwrap();
6222 cx.executor().run_until_parked();
6223 buffer1.update(cx, |buffer, _| {
6224 assert_eq!(buffer.text(), "aaa\nb\nc\n");
6225 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6226 });
6227
6228 // Save a file with windows line endings. The file is written correctly.
6229 buffer2.update(cx, |buffer, cx| {
6230 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
6231 });
6232 project
6233 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
6234 .await
6235 .unwrap();
6236 assert_eq!(
6237 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
6238 "one\r\ntwo\r\nthree\r\nfour\r\n",
6239 );
6240}
6241
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Publishes LSP diagnostics whose `related_information` links primary
    // diagnostics to supporting hints, and verifies that they are grouped:
    // each primary and its hints share a `group_id`, `diagnostics_in_range`
    // returns all entries in buffer order, and `diagnostic_group` returns a
    // single group's entries.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a push-diagnostics message containing two groups:
    // - "error 1" (warning) plus one hint at the same range
    // - "error 2" (error) plus two hints at a different range
    // Hints point back to their primaries via `related_information` entries
    // whose message is "original diagnostic".
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics come back in buffer order; "error 2" and its hints
    // share group 0, "error 1" and its hint share group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 contains "error 2" and both of its hints, in buffer order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 contains "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6501
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Renaming a worktree entry must drive the LSP file-operation protocol:
    // send `workspace/willRenameFiles`, apply the workspace edit the server
    // returns, and then notify the server with `workspace/didRenameFiles`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters the fake server registers: all `.rs` files and
    // all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    // Fake server advertising both willRename and didRename capabilities.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename of `one.rs` -> `three.rs`.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // Workspace edit the server will return from willRenameFiles; the test
    // later asserts this exact edit was resolved by the project.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    // The server must receive willRenameFiles with the old and new URIs
    // before the rename is applied.
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles
    // with the same URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6638
6639#[gpui::test]
6640async fn test_rename(cx: &mut gpui::TestAppContext) {
6641 // hi
6642 init_test(cx);
6643
6644 let fs = FakeFs::new(cx.executor());
6645 fs.insert_tree(
6646 path!("/dir"),
6647 json!({
6648 "one.rs": "const ONE: usize = 1;",
6649 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6650 }),
6651 )
6652 .await;
6653
6654 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6655
6656 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6657 language_registry.add(rust_lang());
6658 let mut fake_servers = language_registry.register_fake_lsp(
6659 "Rust",
6660 FakeLspAdapter {
6661 capabilities: lsp::ServerCapabilities {
6662 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
6663 prepare_provider: Some(true),
6664 work_done_progress_options: Default::default(),
6665 })),
6666 ..Default::default()
6667 },
6668 ..Default::default()
6669 },
6670 );
6671
6672 let (buffer, _handle) = project
6673 .update(cx, |project, cx| {
6674 project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
6675 })
6676 .await
6677 .unwrap();
6678
6679 let fake_server = fake_servers.next().await.unwrap();
6680 cx.executor().run_until_parked();
6681
6682 let response = project.update(cx, |project, cx| {
6683 project.prepare_rename(buffer.clone(), 7, cx)
6684 });
6685 fake_server
6686 .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
6687 assert_eq!(
6688 params.text_document.uri.as_str(),
6689 uri!("file:///dir/one.rs")
6690 );
6691 assert_eq!(params.position, lsp::Position::new(0, 7));
6692 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6693 lsp::Position::new(0, 6),
6694 lsp::Position::new(0, 9),
6695 ))))
6696 })
6697 .next()
6698 .await
6699 .unwrap();
6700 let response = response.await.unwrap();
6701 let PrepareRenameResponse::Success(range) = response else {
6702 panic!("{:?}", response);
6703 };
6704 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
6705 assert_eq!(range, 6..9);
6706
6707 let response = project.update(cx, |project, cx| {
6708 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
6709 });
6710 fake_server
6711 .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
6712 assert_eq!(
6713 params.text_document_position.text_document.uri.as_str(),
6714 uri!("file:///dir/one.rs")
6715 );
6716 assert_eq!(
6717 params.text_document_position.position,
6718 lsp::Position::new(0, 7)
6719 );
6720 assert_eq!(params.new_name, "THREE");
6721 Ok(Some(lsp::WorkspaceEdit {
6722 changes: Some(
6723 [
6724 (
6725 lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
6726 vec![lsp::TextEdit::new(
6727 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
6728 "THREE".to_string(),
6729 )],
6730 ),
6731 (
6732 lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
6733 vec![
6734 lsp::TextEdit::new(
6735 lsp::Range::new(
6736 lsp::Position::new(0, 24),
6737 lsp::Position::new(0, 27),
6738 ),
6739 "THREE".to_string(),
6740 ),
6741 lsp::TextEdit::new(
6742 lsp::Range::new(
6743 lsp::Position::new(0, 35),
6744 lsp::Position::new(0, 38),
6745 ),
6746 "THREE".to_string(),
6747 ),
6748 ],
6749 ),
6750 ]
6751 .into_iter()
6752 .collect(),
6753 ),
6754 ..Default::default()
6755 }))
6756 })
6757 .next()
6758 .await
6759 .unwrap();
6760 let mut transaction = response.await.unwrap().0;
6761 assert_eq!(transaction.len(), 2);
6762 assert_eq!(
6763 transaction
6764 .remove_entry(&buffer)
6765 .unwrap()
6766 .0
6767 .update(cx, |buffer, _| buffer.text()),
6768 "const THREE: usize = 1;"
6769 );
6770 assert_eq!(
6771 transaction
6772 .into_keys()
6773 .next()
6774 .unwrap()
6775 .update(cx, |buffer, _| buffer.text()),
6776 "const TWO: usize = one::THREE + one::THREE;"
6777 );
6778}
6779
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Project-wide text search must reflect both on-disk file contents and
    // unsaved edits in open buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive search for "TWO" over the on-disk tree (the boolean
    // flags match the case-sensitive form used in test_search_with_unicode);
    // results map relative paths to match ranges within each file.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Open four.rs and edit it WITHOUT saving, introducing two new "TWO"
    // occurrences that exist only in the in-memory buffer.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same query must now also report the matches from the dirty buffer.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
6856
6857#[gpui::test]
6858async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
6859 init_test(cx);
6860
6861 let search_query = "file";
6862
6863 let fs = FakeFs::new(cx.executor());
6864 fs.insert_tree(
6865 path!("/dir"),
6866 json!({
6867 "one.rs": r#"// Rust file one"#,
6868 "one.ts": r#"// TypeScript file one"#,
6869 "two.rs": r#"// Rust file two"#,
6870 "two.ts": r#"// TypeScript file two"#,
6871 }),
6872 )
6873 .await;
6874 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6875
6876 assert!(
6877 search(
6878 &project,
6879 SearchQuery::text(
6880 search_query,
6881 false,
6882 true,
6883 false,
6884 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6885 Default::default(),
6886 false,
6887 None
6888 )
6889 .unwrap(),
6890 cx
6891 )
6892 .await
6893 .unwrap()
6894 .is_empty(),
6895 "If no inclusions match, no files should be returned"
6896 );
6897
6898 assert_eq!(
6899 search(
6900 &project,
6901 SearchQuery::text(
6902 search_query,
6903 false,
6904 true,
6905 false,
6906 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
6907 Default::default(),
6908 false,
6909 None
6910 )
6911 .unwrap(),
6912 cx
6913 )
6914 .await
6915 .unwrap(),
6916 HashMap::from_iter([
6917 (path!("dir/one.rs").to_string(), vec![8..12]),
6918 (path!("dir/two.rs").to_string(), vec![8..12]),
6919 ]),
6920 "Rust only search should give only Rust files"
6921 );
6922
6923 assert_eq!(
6924 search(
6925 &project,
6926 SearchQuery::text(
6927 search_query,
6928 false,
6929 true,
6930 false,
6931 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6932 .unwrap(),
6933 Default::default(),
6934 false,
6935 None,
6936 )
6937 .unwrap(),
6938 cx
6939 )
6940 .await
6941 .unwrap(),
6942 HashMap::from_iter([
6943 (path!("dir/one.ts").to_string(), vec![14..18]),
6944 (path!("dir/two.ts").to_string(), vec![14..18]),
6945 ]),
6946 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
6947 );
6948
6949 assert_eq!(
6950 search(
6951 &project,
6952 SearchQuery::text(
6953 search_query,
6954 false,
6955 true,
6956 false,
6957 PathMatcher::new(
6958 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6959 PathStyle::local()
6960 )
6961 .unwrap(),
6962 Default::default(),
6963 false,
6964 None,
6965 )
6966 .unwrap(),
6967 cx
6968 )
6969 .await
6970 .unwrap(),
6971 HashMap::from_iter([
6972 (path!("dir/two.ts").to_string(), vec![14..18]),
6973 (path!("dir/one.rs").to_string(), vec![8..12]),
6974 (path!("dir/one.ts").to_string(), vec![14..18]),
6975 (path!("dir/two.rs").to_string(), vec![8..12]),
6976 ]),
6977 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
6978 );
6979}
6980
6981#[gpui::test]
6982async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
6983 init_test(cx);
6984
6985 let search_query = "file";
6986
6987 let fs = FakeFs::new(cx.executor());
6988 fs.insert_tree(
6989 path!("/dir"),
6990 json!({
6991 "one.rs": r#"// Rust file one"#,
6992 "one.ts": r#"// TypeScript file one"#,
6993 "two.rs": r#"// Rust file two"#,
6994 "two.ts": r#"// TypeScript file two"#,
6995 }),
6996 )
6997 .await;
6998 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6999
7000 assert_eq!(
7001 search(
7002 &project,
7003 SearchQuery::text(
7004 search_query,
7005 false,
7006 true,
7007 false,
7008 Default::default(),
7009 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7010 false,
7011 None,
7012 )
7013 .unwrap(),
7014 cx
7015 )
7016 .await
7017 .unwrap(),
7018 HashMap::from_iter([
7019 (path!("dir/one.rs").to_string(), vec![8..12]),
7020 (path!("dir/one.ts").to_string(), vec![14..18]),
7021 (path!("dir/two.rs").to_string(), vec![8..12]),
7022 (path!("dir/two.ts").to_string(), vec![14..18]),
7023 ]),
7024 "If no exclusions match, all files should be returned"
7025 );
7026
7027 assert_eq!(
7028 search(
7029 &project,
7030 SearchQuery::text(
7031 search_query,
7032 false,
7033 true,
7034 false,
7035 Default::default(),
7036 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
7037 false,
7038 None,
7039 )
7040 .unwrap(),
7041 cx
7042 )
7043 .await
7044 .unwrap(),
7045 HashMap::from_iter([
7046 (path!("dir/one.ts").to_string(), vec![14..18]),
7047 (path!("dir/two.ts").to_string(), vec![14..18]),
7048 ]),
7049 "Rust exclusion search should give only TypeScript files"
7050 );
7051
7052 assert_eq!(
7053 search(
7054 &project,
7055 SearchQuery::text(
7056 search_query,
7057 false,
7058 true,
7059 false,
7060 Default::default(),
7061 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7062 .unwrap(),
7063 false,
7064 None,
7065 )
7066 .unwrap(),
7067 cx
7068 )
7069 .await
7070 .unwrap(),
7071 HashMap::from_iter([
7072 (path!("dir/one.rs").to_string(), vec![8..12]),
7073 (path!("dir/two.rs").to_string(), vec![8..12]),
7074 ]),
7075 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7076 );
7077
7078 assert!(
7079 search(
7080 &project,
7081 SearchQuery::text(
7082 search_query,
7083 false,
7084 true,
7085 false,
7086 Default::default(),
7087 PathMatcher::new(
7088 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7089 PathStyle::local(),
7090 )
7091 .unwrap(),
7092 false,
7093 None,
7094 )
7095 .unwrap(),
7096 cx
7097 )
7098 .await
7099 .unwrap()
7100 .is_empty(),
7101 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7102 );
7103}
7104
7105#[gpui::test]
7106async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
7107 init_test(cx);
7108
7109 let search_query = "file";
7110
7111 let fs = FakeFs::new(cx.executor());
7112 fs.insert_tree(
7113 path!("/dir"),
7114 json!({
7115 "one.rs": r#"// Rust file one"#,
7116 "one.ts": r#"// TypeScript file one"#,
7117 "two.rs": r#"// Rust file two"#,
7118 "two.ts": r#"// TypeScript file two"#,
7119 }),
7120 )
7121 .await;
7122
7123 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7124 let path_style = PathStyle::local();
7125 let _buffer = project.update(cx, |project, cx| {
7126 project.create_local_buffer("file", None, false, cx)
7127 });
7128
7129 assert_eq!(
7130 search(
7131 &project,
7132 SearchQuery::text(
7133 search_query,
7134 false,
7135 true,
7136 false,
7137 Default::default(),
7138 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
7139 false,
7140 None,
7141 )
7142 .unwrap(),
7143 cx
7144 )
7145 .await
7146 .unwrap(),
7147 HashMap::from_iter([
7148 (path!("dir/one.rs").to_string(), vec![8..12]),
7149 (path!("dir/one.ts").to_string(), vec![14..18]),
7150 (path!("dir/two.rs").to_string(), vec![8..12]),
7151 (path!("dir/two.ts").to_string(), vec![14..18]),
7152 ]),
7153 "If no exclusions match, all files should be returned"
7154 );
7155
7156 assert_eq!(
7157 search(
7158 &project,
7159 SearchQuery::text(
7160 search_query,
7161 false,
7162 true,
7163 false,
7164 Default::default(),
7165 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
7166 false,
7167 None,
7168 )
7169 .unwrap(),
7170 cx
7171 )
7172 .await
7173 .unwrap(),
7174 HashMap::from_iter([
7175 (path!("dir/one.ts").to_string(), vec![14..18]),
7176 (path!("dir/two.ts").to_string(), vec![14..18]),
7177 ]),
7178 "Rust exclusion search should give only TypeScript files"
7179 );
7180
7181 assert_eq!(
7182 search(
7183 &project,
7184 SearchQuery::text(
7185 search_query,
7186 false,
7187 true,
7188 false,
7189 Default::default(),
7190 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
7191 false,
7192 None,
7193 )
7194 .unwrap(),
7195 cx
7196 )
7197 .await
7198 .unwrap(),
7199 HashMap::from_iter([
7200 (path!("dir/one.rs").to_string(), vec![8..12]),
7201 (path!("dir/two.rs").to_string(), vec![8..12]),
7202 ]),
7203 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7204 );
7205
7206 assert!(
7207 search(
7208 &project,
7209 SearchQuery::text(
7210 search_query,
7211 false,
7212 true,
7213 false,
7214 Default::default(),
7215 PathMatcher::new(
7216 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7217 PathStyle::local(),
7218 )
7219 .unwrap(),
7220 false,
7221 None,
7222 )
7223 .unwrap(),
7224 cx
7225 )
7226 .await
7227 .unwrap()
7228 .is_empty(),
7229 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7230 );
7231}
7232
7233#[gpui::test]
7234async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
7235 init_test(cx);
7236
7237 let search_query = "file";
7238
7239 let fs = FakeFs::new(cx.executor());
7240 fs.insert_tree(
7241 path!("/dir"),
7242 json!({
7243 "one.rs": r#"// Rust file one"#,
7244 "one.ts": r#"// TypeScript file one"#,
7245 "two.rs": r#"// Rust file two"#,
7246 "two.ts": r#"// TypeScript file two"#,
7247 }),
7248 )
7249 .await;
7250 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7251 assert!(
7252 search(
7253 &project,
7254 SearchQuery::text(
7255 search_query,
7256 false,
7257 true,
7258 false,
7259 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7260 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7261 false,
7262 None,
7263 )
7264 .unwrap(),
7265 cx
7266 )
7267 .await
7268 .unwrap()
7269 .is_empty(),
7270 "If both no exclusions and inclusions match, exclusions should win and return nothing"
7271 );
7272
7273 assert!(
7274 search(
7275 &project,
7276 SearchQuery::text(
7277 search_query,
7278 false,
7279 true,
7280 false,
7281 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7282 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7283 false,
7284 None,
7285 )
7286 .unwrap(),
7287 cx
7288 )
7289 .await
7290 .unwrap()
7291 .is_empty(),
7292 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
7293 );
7294
7295 assert!(
7296 search(
7297 &project,
7298 SearchQuery::text(
7299 search_query,
7300 false,
7301 true,
7302 false,
7303 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7304 .unwrap(),
7305 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7306 .unwrap(),
7307 false,
7308 None,
7309 )
7310 .unwrap(),
7311 cx
7312 )
7313 .await
7314 .unwrap()
7315 .is_empty(),
7316 "Non-matching inclusions and exclusions should not change that."
7317 );
7318
7319 assert_eq!(
7320 search(
7321 &project,
7322 SearchQuery::text(
7323 search_query,
7324 false,
7325 true,
7326 false,
7327 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7328 .unwrap(),
7329 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
7330 .unwrap(),
7331 false,
7332 None,
7333 )
7334 .unwrap(),
7335 cx
7336 )
7337 .await
7338 .unwrap(),
7339 HashMap::from_iter([
7340 (path!("dir/one.ts").to_string(), vec![14..18]),
7341 (path!("dir/two.ts").to_string(), vec![14..18]),
7342 ]),
7343 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
7344 );
7345}
7346
7347#[gpui::test]
7348async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
7349 init_test(cx);
7350
7351 let fs = FakeFs::new(cx.executor());
7352 fs.insert_tree(
7353 path!("/worktree-a"),
7354 json!({
7355 "haystack.rs": r#"// NEEDLE"#,
7356 "haystack.ts": r#"// NEEDLE"#,
7357 }),
7358 )
7359 .await;
7360 fs.insert_tree(
7361 path!("/worktree-b"),
7362 json!({
7363 "haystack.rs": r#"// NEEDLE"#,
7364 "haystack.ts": r#"// NEEDLE"#,
7365 }),
7366 )
7367 .await;
7368
7369 let path_style = PathStyle::local();
7370 let project = Project::test(
7371 fs.clone(),
7372 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
7373 cx,
7374 )
7375 .await;
7376
7377 assert_eq!(
7378 search(
7379 &project,
7380 SearchQuery::text(
7381 "NEEDLE",
7382 false,
7383 true,
7384 false,
7385 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
7386 Default::default(),
7387 true,
7388 None,
7389 )
7390 .unwrap(),
7391 cx
7392 )
7393 .await
7394 .unwrap(),
7395 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
7396 "should only return results from included worktree"
7397 );
7398 assert_eq!(
7399 search(
7400 &project,
7401 SearchQuery::text(
7402 "NEEDLE",
7403 false,
7404 true,
7405 false,
7406 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
7407 Default::default(),
7408 true,
7409 None,
7410 )
7411 .unwrap(),
7412 cx
7413 )
7414 .await
7415 .unwrap(),
7416 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
7417 "should only return results from included worktree"
7418 );
7419
7420 assert_eq!(
7421 search(
7422 &project,
7423 SearchQuery::text(
7424 "NEEDLE",
7425 false,
7426 true,
7427 false,
7428 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
7429 Default::default(),
7430 false,
7431 None,
7432 )
7433 .unwrap(),
7434 cx
7435 )
7436 .await
7437 .unwrap(),
7438 HashMap::from_iter([
7439 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
7440 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
7441 ]),
7442 "should return results from both worktrees"
7443 );
7444}
7445
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Searches skip gitignored directories by default; the fourth boolean to
    // SearchQuery::text opts back into searching ignored files, and inclusion
    // globs can then target a specific ignored directory.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default query: ignored directories (target/, node_modules/) are skipped.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project is created for each query — presumably so earlier scans
    // of ignored directories don't leak into the next assertion; TODO confirm.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // Same query with include-ignored enabled: every file matches now.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include-ignored combined with inclusion/exclusion globs: only the
    // prettier directory is searched, and its .ts file is excluded.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
7570
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    // Searching for non-ASCII text: match ranges are byte offsets (each
    // Cyrillic letter is 2 bytes in UTF-8, so "привет" spans 12 bytes), and a
    // case-insensitive unicode query is built as a Regex query rather than a
    // plain Text query (asserted below via assert_matches).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive: only the lowercase occurrences match.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive: matches both ПРИВЕТ and привет in every file.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing "." is part of the literal query text, not a regex
    // wildcard: only two.rs, which ends with "ПРИВЕТ.", matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7653
7654#[gpui::test]
7655async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7656 init_test(cx);
7657
7658 let fs = FakeFs::new(cx.executor());
7659 fs.insert_tree(
7660 "/one/two",
7661 json!({
7662 "three": {
7663 "a.txt": "",
7664 "four": {}
7665 },
7666 "c.rs": ""
7667 }),
7668 )
7669 .await;
7670
7671 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7672 project
7673 .update(cx, |project, cx| {
7674 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7675 project.create_entry((id, rel_path("b..")), true, cx)
7676 })
7677 .await
7678 .unwrap()
7679 .into_included()
7680 .unwrap();
7681
7682 assert_eq!(
7683 fs.paths(true),
7684 vec![
7685 PathBuf::from(path!("/")),
7686 PathBuf::from(path!("/one")),
7687 PathBuf::from(path!("/one/two")),
7688 PathBuf::from(path!("/one/two/c.rs")),
7689 PathBuf::from(path!("/one/two/three")),
7690 PathBuf::from(path!("/one/two/three/a.txt")),
7691 PathBuf::from(path!("/one/two/three/b..")),
7692 PathBuf::from(path!("/one/two/three/four")),
7693 ]
7694 );
7695}
7696
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // When several language servers are attached to one buffer, a hover
    // request fans out to every server that advertises hover capability;
    // servers without the capability are never queried, and empty (None)
    // responses are dropped from the merged result.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same language: two answer hovers, one answers
    // with None, and one has no hover capability at all.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wait for each server to start, then install the hover handler matching
    // its role. Handlers are kept so we can later await their invocation.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two return a real hover labeled with the server name.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Queried (it has the capability) but responds with no hover.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Must never be queried; the handler panics if it ever runs.
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Ensure each capable server actually received the hover request before
    // inspecting the merged result.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
7851
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // Hover content blocks that are empty or whitespace-only must be filtered
    // out of the merged hover result.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server answers with three content parts that are all effectively
    // empty: "", whitespace, and bare newlines.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Make sure the request was actually dispatched before asserting.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
7925
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // Requesting code actions with a kinds filter: the server offers two
    // actions (organize-imports and fix-all), but only the requested
    // SOURCE_ORGANIZE_IMPORTS kind appears in the result.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server always returns both actions regardless of the request.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Ask for actions over the whole buffer, restricted to one kind.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the organize-imports action survives the kinds filter.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
8004
8005#[gpui::test]
8006async fn test_code_actions_without_requested_kinds_do_not_send_only_filter(
8007 cx: &mut gpui::TestAppContext,
8008) {
8009 init_test(cx);
8010
8011 let fs = FakeFs::new(cx.executor());
8012 fs.insert_tree(
8013 path!("/dir"),
8014 json!({
8015 "a.ts": "a",
8016 }),
8017 )
8018 .await;
8019
8020 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8021
8022 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8023 language_registry.add(typescript_lang());
8024 let mut fake_language_servers = language_registry.register_fake_lsp(
8025 "TypeScript",
8026 FakeLspAdapter {
8027 capabilities: lsp::ServerCapabilities {
8028 code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
8029 lsp::CodeActionOptions {
8030 code_action_kinds: Some(vec![
8031 CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
8032 "source.doc".into(),
8033 ]),
8034 ..lsp::CodeActionOptions::default()
8035 },
8036 )),
8037 ..lsp::ServerCapabilities::default()
8038 },
8039 ..FakeLspAdapter::default()
8040 },
8041 );
8042
8043 let (buffer, _handle) = project
8044 .update(cx, |p, cx| {
8045 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
8046 })
8047 .await
8048 .unwrap();
8049 cx.executor().run_until_parked();
8050
8051 let fake_server = fake_language_servers
8052 .next()
8053 .await
8054 .expect("failed to get the language server");
8055
8056 let mut request_handled = fake_server.set_request_handler::<
8057 lsp::request::CodeActionRequest,
8058 _,
8059 _,
8060 >(move |params, _| async move {
8061 assert_eq!(
8062 params.context.only, None,
8063 "Code action requests without explicit kind filters should not send `context.only`"
8064 );
8065 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8066 lsp::CodeAction {
8067 title: "Add test".to_string(),
8068 kind: Some("source.addTest".into()),
8069 ..lsp::CodeAction::default()
8070 },
8071 )]))
8072 });
8073
8074 let code_actions_task = project.update(cx, |project, cx| {
8075 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8076 });
8077
8078 let () = request_handled
8079 .next()
8080 .await
8081 .expect("The code action request should have been triggered");
8082
8083 let code_actions = code_actions_task.await.unwrap().unwrap();
8084 assert_eq!(code_actions.len(), 1);
8085 assert_eq!(
8086 code_actions[0].lsp_action.action_kind(),
8087 Some("source.addTest".into())
8088 );
8089}
8090
8091#[gpui::test]
8092async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
8093 init_test(cx);
8094
8095 let fs = FakeFs::new(cx.executor());
8096 fs.insert_tree(
8097 path!("/dir"),
8098 json!({
8099 "a.tsx": "a",
8100 }),
8101 )
8102 .await;
8103
8104 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8105
8106 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8107 language_registry.add(tsx_lang());
8108 let language_server_names = [
8109 "TypeScriptServer",
8110 "TailwindServer",
8111 "ESLintServer",
8112 "NoActionsCapabilitiesServer",
8113 ];
8114
8115 let mut language_server_rxs = [
8116 language_registry.register_fake_lsp(
8117 "tsx",
8118 FakeLspAdapter {
8119 name: language_server_names[0],
8120 capabilities: lsp::ServerCapabilities {
8121 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8122 ..lsp::ServerCapabilities::default()
8123 },
8124 ..FakeLspAdapter::default()
8125 },
8126 ),
8127 language_registry.register_fake_lsp(
8128 "tsx",
8129 FakeLspAdapter {
8130 name: language_server_names[1],
8131 capabilities: lsp::ServerCapabilities {
8132 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8133 ..lsp::ServerCapabilities::default()
8134 },
8135 ..FakeLspAdapter::default()
8136 },
8137 ),
8138 language_registry.register_fake_lsp(
8139 "tsx",
8140 FakeLspAdapter {
8141 name: language_server_names[2],
8142 capabilities: lsp::ServerCapabilities {
8143 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8144 ..lsp::ServerCapabilities::default()
8145 },
8146 ..FakeLspAdapter::default()
8147 },
8148 ),
8149 language_registry.register_fake_lsp(
8150 "tsx",
8151 FakeLspAdapter {
8152 name: language_server_names[3],
8153 capabilities: lsp::ServerCapabilities {
8154 code_action_provider: None,
8155 ..lsp::ServerCapabilities::default()
8156 },
8157 ..FakeLspAdapter::default()
8158 },
8159 ),
8160 ];
8161
8162 let (buffer, _handle) = project
8163 .update(cx, |p, cx| {
8164 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
8165 })
8166 .await
8167 .unwrap();
8168 cx.executor().run_until_parked();
8169
8170 let mut servers_with_actions_requests = HashMap::default();
8171 for i in 0..language_server_names.len() {
8172 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
8173 panic!(
8174 "Failed to get language server #{i} with name {}",
8175 &language_server_names[i]
8176 )
8177 });
8178 let new_server_name = new_server.server.name();
8179
8180 assert!(
8181 !servers_with_actions_requests.contains_key(&new_server_name),
8182 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
8183 );
8184 match new_server_name.0.as_ref() {
8185 "TailwindServer" | "TypeScriptServer" => {
8186 servers_with_actions_requests.insert(
8187 new_server_name.clone(),
8188 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8189 move |_, _| {
8190 let name = new_server_name.clone();
8191 async move {
8192 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8193 lsp::CodeAction {
8194 title: format!("{name} code action"),
8195 ..lsp::CodeAction::default()
8196 },
8197 )]))
8198 }
8199 },
8200 ),
8201 );
8202 }
8203 "ESLintServer" => {
8204 servers_with_actions_requests.insert(
8205 new_server_name,
8206 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8207 |_, _| async move { Ok(None) },
8208 ),
8209 );
8210 }
8211 "NoActionsCapabilitiesServer" => {
8212 let _never_handled = new_server
8213 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
8214 panic!(
8215 "Should not call for code actions server with no corresponding capabilities"
8216 )
8217 });
8218 }
8219 unexpected => panic!("Unexpected server name: {unexpected}"),
8220 }
8221 }
8222
8223 let code_actions_task = project.update(cx, |project, cx| {
8224 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8225 });
8226
8227 // cx.run_until_parked();
8228 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
8229 |mut code_actions_request| async move {
8230 code_actions_request
8231 .next()
8232 .await
8233 .expect("All code actions requests should have been triggered")
8234 },
8235 ))
8236 .await;
8237 assert_eq!(
8238 vec!["TailwindServer code action", "TypeScriptServer code action"],
8239 code_actions_task
8240 .await
8241 .unwrap()
8242 .unwrap()
8243 .into_iter()
8244 .map(|code_action| code_action.lsp_action.title().to_owned())
8245 .sorted()
8246 .collect::<Vec<_>>(),
8247 "Should receive code actions responses from all related servers with hover capabilities"
8248 );
8249}
8250
8251#[gpui::test]
8252async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
8253 init_test(cx);
8254
8255 let fs = FakeFs::new(cx.executor());
8256 fs.insert_tree(
8257 "/dir",
8258 json!({
8259 "a.rs": "let a = 1;",
8260 "b.rs": "let b = 2;",
8261 "c.rs": "let c = 2;",
8262 }),
8263 )
8264 .await;
8265
8266 let project = Project::test(
8267 fs,
8268 [
8269 "/dir/a.rs".as_ref(),
8270 "/dir/b.rs".as_ref(),
8271 "/dir/c.rs".as_ref(),
8272 ],
8273 cx,
8274 )
8275 .await;
8276
8277 // check the initial state and get the worktrees
8278 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
8279 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8280 assert_eq!(worktrees.len(), 3);
8281
8282 let worktree_a = worktrees[0].read(cx);
8283 let worktree_b = worktrees[1].read(cx);
8284 let worktree_c = worktrees[2].read(cx);
8285
8286 // check they start in the right order
8287 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
8288 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
8289 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
8290
8291 (
8292 worktrees[0].clone(),
8293 worktrees[1].clone(),
8294 worktrees[2].clone(),
8295 )
8296 });
8297
8298 // move first worktree to after the second
8299 // [a, b, c] -> [b, a, c]
8300 project
8301 .update(cx, |project, cx| {
8302 let first = worktree_a.read(cx);
8303 let second = worktree_b.read(cx);
8304 project.move_worktree(first.id(), second.id(), cx)
8305 })
8306 .expect("moving first after second");
8307
8308 // check the state after moving
8309 project.update(cx, |project, cx| {
8310 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8311 assert_eq!(worktrees.len(), 3);
8312
8313 let first = worktrees[0].read(cx);
8314 let second = worktrees[1].read(cx);
8315 let third = worktrees[2].read(cx);
8316
8317 // check they are now in the right order
8318 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8319 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
8320 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8321 });
8322
8323 // move the second worktree to before the first
8324 // [b, a, c] -> [a, b, c]
8325 project
8326 .update(cx, |project, cx| {
8327 let second = worktree_a.read(cx);
8328 let first = worktree_b.read(cx);
8329 project.move_worktree(first.id(), second.id(), cx)
8330 })
8331 .expect("moving second before first");
8332
8333 // check the state after moving
8334 project.update(cx, |project, cx| {
8335 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8336 assert_eq!(worktrees.len(), 3);
8337
8338 let first = worktrees[0].read(cx);
8339 let second = worktrees[1].read(cx);
8340 let third = worktrees[2].read(cx);
8341
8342 // check they are now in the right order
8343 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8344 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8345 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8346 });
8347
8348 // move the second worktree to after the third
8349 // [a, b, c] -> [a, c, b]
8350 project
8351 .update(cx, |project, cx| {
8352 let second = worktree_b.read(cx);
8353 let third = worktree_c.read(cx);
8354 project.move_worktree(second.id(), third.id(), cx)
8355 })
8356 .expect("moving second after third");
8357
8358 // check the state after moving
8359 project.update(cx, |project, cx| {
8360 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8361 assert_eq!(worktrees.len(), 3);
8362
8363 let first = worktrees[0].read(cx);
8364 let second = worktrees[1].read(cx);
8365 let third = worktrees[2].read(cx);
8366
8367 // check they are now in the right order
8368 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8369 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8370 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
8371 });
8372
8373 // move the third worktree to before the second
8374 // [a, c, b] -> [a, b, c]
8375 project
8376 .update(cx, |project, cx| {
8377 let third = worktree_c.read(cx);
8378 let second = worktree_b.read(cx);
8379 project.move_worktree(third.id(), second.id(), cx)
8380 })
8381 .expect("moving third before second");
8382
8383 // check the state after moving
8384 project.update(cx, |project, cx| {
8385 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8386 assert_eq!(worktrees.len(), 3);
8387
8388 let first = worktrees[0].read(cx);
8389 let second = worktrees[1].read(cx);
8390 let third = worktrees[2].read(cx);
8391
8392 // check they are now in the right order
8393 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8394 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8395 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8396 });
8397
8398 // move the first worktree to after the third
8399 // [a, b, c] -> [b, c, a]
8400 project
8401 .update(cx, |project, cx| {
8402 let first = worktree_a.read(cx);
8403 let third = worktree_c.read(cx);
8404 project.move_worktree(first.id(), third.id(), cx)
8405 })
8406 .expect("moving first after third");
8407
8408 // check the state after moving
8409 project.update(cx, |project, cx| {
8410 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8411 assert_eq!(worktrees.len(), 3);
8412
8413 let first = worktrees[0].read(cx);
8414 let second = worktrees[1].read(cx);
8415 let third = worktrees[2].read(cx);
8416
8417 // check they are now in the right order
8418 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8419 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8420 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
8421 });
8422
8423 // move the third worktree to before the first
8424 // [b, c, a] -> [a, b, c]
8425 project
8426 .update(cx, |project, cx| {
8427 let third = worktree_a.read(cx);
8428 let first = worktree_b.read(cx);
8429 project.move_worktree(third.id(), first.id(), cx)
8430 })
8431 .expect("moving third before first");
8432
8433 // check the state after moving
8434 project.update(cx, |project, cx| {
8435 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8436 assert_eq!(worktrees.len(), 3);
8437
8438 let first = worktrees[0].read(cx);
8439 let second = worktrees[1].read(cx);
8440 let third = worktrees[2].read(cx);
8441
8442 // check they are now in the right order
8443 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8444 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8445 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8446 });
8447}
8448
8449#[gpui::test]
8450async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
8451 init_test(cx);
8452
8453 let staged_contents = r#"
8454 fn main() {
8455 println!("hello world");
8456 }
8457 "#
8458 .unindent();
8459 let file_contents = r#"
8460 // print goodbye
8461 fn main() {
8462 println!("goodbye world");
8463 }
8464 "#
8465 .unindent();
8466
8467 let fs = FakeFs::new(cx.background_executor.clone());
8468 fs.insert_tree(
8469 "/dir",
8470 json!({
8471 ".git": {},
8472 "src": {
8473 "main.rs": file_contents,
8474 }
8475 }),
8476 )
8477 .await;
8478
8479 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
8480
8481 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8482
8483 let buffer = project
8484 .update(cx, |project, cx| {
8485 project.open_local_buffer("/dir/src/main.rs", cx)
8486 })
8487 .await
8488 .unwrap();
8489 let unstaged_diff = project
8490 .update(cx, |project, cx| {
8491 project.open_unstaged_diff(buffer.clone(), cx)
8492 })
8493 .await
8494 .unwrap();
8495
8496 cx.run_until_parked();
8497 unstaged_diff.update(cx, |unstaged_diff, cx| {
8498 let snapshot = buffer.read(cx).snapshot();
8499 assert_hunks(
8500 unstaged_diff.snapshot(cx).hunks(&snapshot),
8501 &snapshot,
8502 &unstaged_diff.base_text_string(cx).unwrap(),
8503 &[
8504 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
8505 (
8506 2..3,
8507 " println!(\"hello world\");\n",
8508 " println!(\"goodbye world\");\n",
8509 DiffHunkStatus::modified_none(),
8510 ),
8511 ],
8512 );
8513 });
8514
8515 let staged_contents = r#"
8516 // print goodbye
8517 fn main() {
8518 }
8519 "#
8520 .unindent();
8521
8522 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
8523
8524 cx.run_until_parked();
8525 unstaged_diff.update(cx, |unstaged_diff, cx| {
8526 let snapshot = buffer.read(cx).snapshot();
8527 assert_hunks(
8528 unstaged_diff.snapshot(cx).hunks_intersecting_range(
8529 Anchor::min_max_range_for_buffer(snapshot.remote_id()),
8530 &snapshot,
8531 ),
8532 &snapshot,
8533 &unstaged_diff.base_text(cx).text(),
8534 &[(
8535 2..3,
8536 "",
8537 " println!(\"goodbye world\");\n",
8538 DiffHunkStatus::added_none(),
8539 )],
8540 );
8541 });
8542}
8543
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of `modification.rs`: HEAD, the index (staged), and the
    // working copy on disk.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index, but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // Against HEAD: the comment line is an unstaged addition (it has a secondary
    // hunk in the index diff), while the println! change is already staged.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a deletion; the deletion is not yet staged, so the
    // hunk still has a secondary (index) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the deletion is staged, the secondary hunk disappears.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx).hunks_intersecting_range(
                Anchor::min_max_range_for_buffer(snapshot.remote_id()),
                &snapshot,
            ),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8735
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and the index start out identical. The working copy deletes "zero"
    // and uppercases "two" and "four", producing three unstaged hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's events so staging notifications can be asserted below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The index write has not completed yet, so the hunk is marked pending.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // Optimistically marked pending, even though the write will fail.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback is reported as another diff change covering the whole file.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
9085
9086#[gpui::test(seeds(340, 472))]
9087async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
9088 use DiffHunkSecondaryStatus::*;
9089 init_test(cx);
9090
9091 let committed_contents = r#"
9092 zero
9093 one
9094 two
9095 three
9096 four
9097 five
9098 "#
9099 .unindent();
9100 let file_contents = r#"
9101 one
9102 TWO
9103 three
9104 FOUR
9105 five
9106 "#
9107 .unindent();
9108
9109 let fs = FakeFs::new(cx.background_executor.clone());
9110 fs.insert_tree(
9111 "/dir",
9112 json!({
9113 ".git": {},
9114 "file.txt": file_contents.clone()
9115 }),
9116 )
9117 .await;
9118
9119 fs.set_head_for_repo(
9120 "/dir/.git".as_ref(),
9121 &[("file.txt", committed_contents.clone())],
9122 "deadbeef",
9123 );
9124 fs.set_index_for_repo(
9125 "/dir/.git".as_ref(),
9126 &[("file.txt", committed_contents.clone())],
9127 );
9128
9129 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
9130
9131 let buffer = project
9132 .update(cx, |project, cx| {
9133 project.open_local_buffer("/dir/file.txt", cx)
9134 })
9135 .await
9136 .unwrap();
9137 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
9138 let uncommitted_diff = project
9139 .update(cx, |project, cx| {
9140 project.open_uncommitted_diff(buffer.clone(), cx)
9141 })
9142 .await
9143 .unwrap();
9144
9145 // The hunks are initially unstaged.
9146 uncommitted_diff.read_with(cx, |diff, cx| {
9147 assert_hunks(
9148 diff.snapshot(cx).hunks(&snapshot),
9149 &snapshot,
9150 &diff.base_text_string(cx).unwrap(),
9151 &[
9152 (
9153 0..0,
9154 "zero\n",
9155 "",
9156 DiffHunkStatus::deleted(HasSecondaryHunk),
9157 ),
9158 (
9159 1..2,
9160 "two\n",
9161 "TWO\n",
9162 DiffHunkStatus::modified(HasSecondaryHunk),
9163 ),
9164 (
9165 3..4,
9166 "four\n",
9167 "FOUR\n",
9168 DiffHunkStatus::modified(HasSecondaryHunk),
9169 ),
9170 ],
9171 );
9172 });
9173
9174 // Pause IO events
9175 fs.pause_events();
9176
9177 // Stage the first hunk.
9178 uncommitted_diff.update(cx, |diff, cx| {
9179 let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
9180 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
9181 assert_hunks(
9182 diff.snapshot(cx).hunks(&snapshot),
9183 &snapshot,
9184 &diff.base_text_string(cx).unwrap(),
9185 &[
9186 (
9187 0..0,
9188 "zero\n",
9189 "",
9190 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
9191 ),
9192 (
9193 1..2,
9194 "two\n",
9195 "TWO\n",
9196 DiffHunkStatus::modified(HasSecondaryHunk),
9197 ),
9198 (
9199 3..4,
9200 "four\n",
9201 "FOUR\n",
9202 DiffHunkStatus::modified(HasSecondaryHunk),
9203 ),
9204 ],
9205 );
9206 });
9207
9208 // Stage the second hunk *before* receiving the FS event for the first hunk.
9209 cx.run_until_parked();
9210 uncommitted_diff.update(cx, |diff, cx| {
9211 let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
9212 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
9213 assert_hunks(
9214 diff.snapshot(cx).hunks(&snapshot),
9215 &snapshot,
9216 &diff.base_text_string(cx).unwrap(),
9217 &[
9218 (
9219 0..0,
9220 "zero\n",
9221 "",
9222 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
9223 ),
9224 (
9225 1..2,
9226 "two\n",
9227 "TWO\n",
9228 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
9229 ),
9230 (
9231 3..4,
9232 "four\n",
9233 "FOUR\n",
9234 DiffHunkStatus::modified(HasSecondaryHunk),
9235 ),
9236 ],
9237 );
9238 });
9239
9240 // Process the FS event for staging the first hunk (second event is still pending).
9241 fs.flush_events(1);
9242 cx.run_until_parked();
9243
9244 // Stage the third hunk before receiving the second FS event.
9245 uncommitted_diff.update(cx, |diff, cx| {
9246 let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
9247 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
9248 });
9249
9250 // Wait for all remaining IO.
9251 cx.run_until_parked();
9252 fs.flush_events(fs.buffered_event_count());
9253
9254 // Now all hunks are staged.
9255 cx.run_until_parked();
9256 uncommitted_diff.update(cx, |diff, cx| {
9257 assert_hunks(
9258 diff.snapshot(cx).hunks(&snapshot),
9259 &snapshot,
9260 &diff.base_text_string(cx).unwrap(),
9261 &[
9262 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
9263 (
9264 1..2,
9265 "two\n",
9266 "TWO\n",
9267 DiffHunkStatus::modified(NoSecondaryHunk),
9268 ),
9269 (
9270 3..4,
9271 "four\n",
9272 "FOUR\n",
9273 DiffHunkStatus::modified(NoSecondaryHunk),
9274 ),
9275 ],
9276 );
9277 });
9278}
9279
// Randomized test: repeatedly stage/unstage hunks in arbitrary order with
// random yields in between, then verify that each hunk's secondary (index)
// status settles to the value implied by the last operation performed on it.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; override via `OPERATIONS=N`.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 committed lines; every fifth line is modified in the working copy,
    // producing 6 separate diff hunks (asserted below).
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    // Randomly toggle each chosen hunk's staged state, mirroring the expected
    // pending status in our local copy of `hunks`.
    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times so operations overlap in-flight IO.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, pending statuses resolve to their final states.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    // The diff's actual hunk statuses must match our locally-tracked model.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
9399
9400#[gpui::test]
9401async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
9402 init_test(cx);
9403
9404 let committed_contents = r#"
9405 fn main() {
9406 println!("hello from HEAD");
9407 }
9408 "#
9409 .unindent();
9410 let file_contents = r#"
9411 fn main() {
9412 println!("hello from the working copy");
9413 }
9414 "#
9415 .unindent();
9416
9417 let fs = FakeFs::new(cx.background_executor.clone());
9418 fs.insert_tree(
9419 "/dir",
9420 json!({
9421 ".git": {},
9422 "src": {
9423 "main.rs": file_contents,
9424 }
9425 }),
9426 )
9427 .await;
9428
9429 fs.set_head_for_repo(
9430 Path::new("/dir/.git"),
9431 &[("src/main.rs", committed_contents.clone())],
9432 "deadbeef",
9433 );
9434 fs.set_index_for_repo(
9435 Path::new("/dir/.git"),
9436 &[("src/main.rs", committed_contents.clone())],
9437 );
9438
9439 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
9440
9441 let buffer = project
9442 .update(cx, |project, cx| {
9443 project.open_local_buffer("/dir/src/main.rs", cx)
9444 })
9445 .await
9446 .unwrap();
9447 let uncommitted_diff = project
9448 .update(cx, |project, cx| {
9449 project.open_uncommitted_diff(buffer.clone(), cx)
9450 })
9451 .await
9452 .unwrap();
9453
9454 cx.run_until_parked();
9455 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
9456 let snapshot = buffer.read(cx).snapshot();
9457 assert_hunks(
9458 uncommitted_diff.snapshot(cx).hunks(&snapshot),
9459 &snapshot,
9460 &uncommitted_diff.base_text_string(cx).unwrap(),
9461 &[(
9462 1..2,
9463 " println!(\"hello from HEAD\");\n",
9464 " println!(\"hello from the working copy\");\n",
9465 DiffHunkStatus {
9466 kind: DiffHunkStatusKind::Modified,
9467 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
9468 },
9469 )],
9470 );
9471 });
9472}
9473
// TODO: Should we test this on Windows also?
// Regression test: staging a hunk must not clobber the executable bit that is
// recorded for the file in the git index. Uses a real git repository on disk.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` with mode 0755, then modify its contents on disk so the
    // working copy differs from HEAD/index.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage all of the file's hunks.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // The staged diff must not contain a mode change.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // The index entry must still record the executable mode (100755).
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
9558
// Verifies `repository_and_path_for_project_path`: files outside any repo map
// to `None`, files inside a repo map to that repo's work directory plus a
// repo-relative path, and a nested repository shadows its parent. Also checks
// that removing a `.git` directory invalidates the mapping.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (input path, expected (repo work dir, repo-relative path)).
        // `c.txt` lies outside both repositories; `a.txt` belongs to the
        // nested `dep1` repo rather than the outer `dir1` repo.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Deleting the outer repo's `.git` directory should remove the mapping
    // for files that belonged to it.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
9648
// Verifies special-casing of a repository at the user's home directory: a
// project rooted at a subfolder of home does NOT treat home's `.git` as its
// repository, but a project rooted at home itself does.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Case 1: project rooted at `~/project` — home's repo should be ignored.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Case 2: project rooted at home itself — the home repo applies.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
9706
// Exercises cached git status reporting against a real on-disk repository:
// initial statuses after startup, status updates after further edits, and
// status updates after commits and file deletions.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; its status should appear.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Commit the current state, then delete one tracked and one untracked file.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
                diff_stat: Some(DiffStat {
                    added: 0,
                    deleted: 1,
                }),
            }]
        );
    });
}
9862
// Verifies status post-processing rules: a file deleted in the index but
// present in the worktree surfaces as a combined "deleted in index, added in
// worktree" (DA) status, and a nested repository's work directory does not
// appear in the outer repository's statuses. Currently `#[ignore]`d.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer `project` repository (not the nested `sub` one).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
                diff_stat: None,
            }]
        )
    });
}
9927
9928#[track_caller]
9929/// We merge lhs into rhs.
9930fn merge_pending_ops_snapshots(
9931 source: Vec<pending_op::PendingOps>,
9932 mut target: Vec<pending_op::PendingOps>,
9933) -> Vec<pending_op::PendingOps> {
9934 for s_ops in source {
9935 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9936 if ops.repo_path == s_ops.repo_path {
9937 Some(idx)
9938 } else {
9939 None
9940 }
9941 }) {
9942 let t_ops = &mut target[idx];
9943 for s_op in s_ops.ops {
9944 if let Some(op_idx) = t_ops
9945 .ops
9946 .iter()
9947 .zip(0..)
9948 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9949 {
9950 let t_op = &mut t_ops.ops[op_idx];
9951 match (s_op.job_status, t_op.job_status) {
9952 (pending_op::JobStatus::Running, _) => {}
9953 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9954 (s_st, t_st) if s_st == t_st => {}
9955 _ => unreachable!(),
9956 }
9957 } else {
9958 t_ops.ops.push(s_op);
9959 }
9960 }
9961 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9962 } else {
9963 target.push(s_ops);
9964 }
9965 }
9966 target
9967}
9968
// Verifies that alternating stage/unstage operations on a single file each
// produce a pending op that transitions Running -> Finished, and that the
// accumulated pending-ops history (collected via `PendingOpsChanged` events)
// records every operation in order.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store, merged
    // so that later (settled) statuses supersede earlier (running) ones.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Expected id of the next pending op; ids are assigned sequentially.
    let mut id = 1u16;

    // Stage or unstage `path` and assert the op is Running while in flight
    // and Finished once the returned task completes.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The accumulated history records all five ops, in order, all Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The final (odd) operation staged the file, so it shows as Added.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10133
// Verifies deduplication of redundant staging requests: when the same file is
// staged twice in quick succession, the first op is recorded as Skipped and
// only the second actually runs to completion.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First staging request: detached, superseded by the one below.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second staging request for the same path; awaited with a timeout.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was skipped in favor of the identical op 2, which finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file ended up staged (Added in the index).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10243
// Verifies pending-op bookkeeping for whole-repository operations: after
// staging one entry, `stage_all` and `unstage_all` must produce coherent
// per-path op histories for every affected file, ending with both files
// untracked again.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every pending-ops snapshot emitted by the git store.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a single entry, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: explicitly staged (1), then unstaged by `unstage_all` (2).
    // Note that `stage_all` did not create a redundant op for it.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: staged by `stage_all` (1), unstaged by `unstage_all` (2).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstaging everything, both files are untracked again.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
            ]
        );
    });
}
10374
// Verifies that git statuses are resolved against the repository root even when
// the project's worktree is rooted at a subfolder deep inside the repository,
// and that status changes are picked up after a subsequent rescan.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths (relative to /root/my-repo, not the opened worktree).
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Only e.txt starts out with a status; c.txt is reported clean (None).
    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open the project rooted two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The work directory is the repository root, not the worktree root.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clear all statuses and verify the repository reflects that after rescanning.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
10454
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE: `#[cfg(any())]` never matches, so this test is currently compiled out entirely.
// Exercises merge-conflict tracking across a conflicted cherry-pick in a real
// on-disk git repository, and that conflicts clear once the cherry-pick is resolved.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem and git, so the executor must be allowed to block.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create divergent changes to a.txt on two branches so the cherry-pick conflicts.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // A conflicted cherry-pick leaves CHERRY_PICK_HEAD behind; sanity-check it exists.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository entity should have observed the conflict on a.txt.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After resolution the conflict set should be empty again.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
10537
// Verifies that rewriting .gitignore flips which files are ignored, and that a
// newly non-ignored file staged in the index is reported with an Added status.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index both track .gitignore and a.xml; b.txt starts out ignored.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Ignored state has swapped: a.xml is now ignored, b.txt is tracked and Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
10605
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// Verifies that renaming a repository's work directory on disk updates
// `work_directory_abs_path` while preserving the per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem and git, so the executor must be allowed to block.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // "a" is committed then modified; "b" is never added, so it stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory underneath the open repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should follow the rename and keep the same statuses.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10687
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of Windows. See:
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// End-to-end check of file-status tracking against a real git repository:
// initial scan, worktree edits, commits, resets/stash, .gitignore changes, and
// directory renames should all be reflected in the repository's statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem and git, so the executor must be allowed to block.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout the test.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // Committed files report no status (clean).
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files and extend the ignore rules, then commit the new .gitignore.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a nested directory with a new untracked file.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the untracked status should follow the file.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10912
// Verifies that churn inside gitignored directories (e.g. `target/` during a
// build) produces neither repository-update events nor stray worktree entries,
// while entries directly inside an already-loaded ignored dir still get updates.
// NOTE(review): marked #[ignore] — presumably due to flakiness; confirm before re-enabling.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem and git, so the executor must be allowed to block.
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record every repository update and worktree entry change for later assertions.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel is test-infrastructure noise, not a real entry.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Force-load a file inside the ignored dir so its ancestors get scanned.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![RepositoryEvent::StatusesChanged,],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Simulate build churn: create a deps dir, write a temp file, then delete it all.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
    No updates for more nested directories should happen as those are ignored",
    );
}
11071
11072// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
11073// to different timings/ordering of events.
11074#[ignore]
11075#[gpui::test]
11076async fn test_odd_events_for_ignored_dirs(
11077 executor: BackgroundExecutor,
11078 cx: &mut gpui::TestAppContext,
11079) {
11080 init_test(cx);
11081 let fs = FakeFs::new(executor);
11082 fs.insert_tree(
11083 path!("/root"),
11084 json!({
11085 ".git": {},
11086 ".gitignore": "**/target/",
11087 "src": {
11088 "main.rs": "fn main() {}",
11089 },
11090 "target": {
11091 "debug": {
11092 "foo.txt": "foo",
11093 "deps": {}
11094 }
11095 }
11096 }),
11097 )
11098 .await;
11099 fs.set_head_and_index_for_repo(
11100 path!("/root/.git").as_ref(),
11101 &[
11102 (".gitignore", "**/target/".into()),
11103 ("src/main.rs", "fn main() {}".into()),
11104 ],
11105 );
11106
11107 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11108 let repository_updates = Arc::new(Mutex::new(Vec::new()));
11109 let project_events = Arc::new(Mutex::new(Vec::new()));
11110 project.update(cx, |project, cx| {
11111 let repository_updates = repository_updates.clone();
11112 cx.subscribe(project.git_store(), move |_, _, e, _| {
11113 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
11114 repository_updates.lock().push(e.clone());
11115 }
11116 })
11117 .detach();
11118 let project_events = project_events.clone();
11119 cx.subscribe_self(move |_, e, _| {
11120 if let Event::WorktreeUpdatedEntries(_, updates) = e {
11121 project_events.lock().extend(
11122 updates
11123 .iter()
11124 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
11125 .filter(|(path, _)| path != "fs-event-sentinel"),
11126 );
11127 }
11128 })
11129 .detach();
11130 });
11131
11132 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11133 tree.update(cx, |tree, cx| {
11134 tree.load_file(rel_path("target/debug/foo.txt"), cx)
11135 })
11136 .await
11137 .unwrap();
11138 tree.flush_fs_events(cx).await;
11139 project
11140 .update(cx, |project, cx| project.git_scans_complete(cx))
11141 .await;
11142 cx.run_until_parked();
11143 tree.update(cx, |tree, _| {
11144 assert_eq!(
11145 tree.entries(true, 0)
11146 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
11147 .collect::<Vec<_>>(),
11148 vec![
11149 (rel_path(""), false),
11150 (rel_path(".gitignore"), false),
11151 (rel_path("src"), false),
11152 (rel_path("src/main.rs"), false),
11153 (rel_path("target"), true),
11154 (rel_path("target/debug"), true),
11155 (rel_path("target/debug/deps"), true),
11156 (rel_path("target/debug/foo.txt"), true),
11157 ]
11158 );
11159 });
11160
11161 assert_eq!(
11162 repository_updates.lock().drain(..).collect::<Vec<_>>(),
11163 vec![
11164 RepositoryEvent::BranchChanged,
11165 RepositoryEvent::StatusesChanged,
11166 RepositoryEvent::StatusesChanged,
11167 ],
11168 "Initial worktree scan should produce a repo update event"
11169 );
11170 assert_eq!(
11171 project_events.lock().drain(..).collect::<Vec<_>>(),
11172 vec![
11173 ("target".to_string(), PathChange::Loaded),
11174 ("target/debug".to_string(), PathChange::Loaded),
11175 ("target/debug/deps".to_string(), PathChange::Loaded),
11176 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
11177 ],
11178 "All non-ignored entries and all opened firs should be getting a project event",
11179 );
11180
11181 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
11182 // This may happen multiple times during a single flycheck, but once is enough for testing.
11183 fs.emit_fs_event("/root/target/debug/deps", None);
11184 tree.flush_fs_events(cx).await;
11185 project
11186 .update(cx, |project, cx| project.git_scans_complete(cx))
11187 .await;
11188 cx.executor().run_until_parked();
11189
11190 assert_eq!(
11191 repository_updates
11192 .lock()
11193 .iter()
11194 .cloned()
11195 .collect::<Vec<_>>(),
11196 Vec::new(),
11197 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
11198 );
11199 assert_eq!(
11200 project_events.lock().as_slice(),
11201 Vec::new(),
11202 "No further project events should happen, as only ignored dirs received FS events",
11203 );
11204}
11205
// Verifies that adding an invisible (non-user-visible) worktree does not cause
// git repositories outside the visible worktree to be reported by the project.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Only dep1 is opened as a visible worktree; dir1's repo stays out of scope.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add /root/dir1/b.txt as an invisible (visible: false) worktree.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list must be unchanged: dir1's repo is still not reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
11267
// Verifies ignored/tracked state across rescans: ancestor .gitignore rules apply,
// newly created files pick up the right state, and an index update marks a new
// tracked file as Added. Runs multiple iterations to shake out ordering issues.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so every entry (including .git) is scanned.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored dir's entries to be loaded so their state can be asserted.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        // Ignored only by the ancestor .gitignore (outside the repo), so not ignored here.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new file and stage it in the index; also create files that fall
    // under the ancestor gitignore and the repo-local gitignore.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // Staged but not in HEAD => Added.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git dir itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
11408
/// Verifies that linked git worktrees (a `.git` *file* pointing into
/// `<repo>/.git/worktrees/...`) and submodules (a `.git` file pointing into
/// `<repo>/.git/modules/...`) are discovered as three distinct repositories,
/// and that statuses in both the linked worktree and the submodule refresh
/// when their underlying git state changes.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // Lay out a main repo at /project, a linked worktree at
    // /project/some-worktree, and a submodule at /project/subdir/some-submodule.
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should have been discovered by the initial scan.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        // The buffer resolves to the linked worktree's repo, whose
        // original repository lives at the main checkout.
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        pretty_assertions::assert_eq!(
            repo.read(cx).original_repo_abs_path,
            Path::new(path!("/project")).into(),
        );
        assert!(
            repo.read(cx).linked_worktree_path().is_some(),
            "linked worktree should be detected as a linked worktree"
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    // Wait until the repository has processed all pending jobs.
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        // Disk contains "B" while HEAD/index contain "b" → modified in worktree.
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        // Unlike a linked worktree, a submodule's original repo path is its
        // own work directory.
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        pretty_assertions::assert_eq!(
            repo.read(cx).original_repo_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        assert!(
            repo.read(cx).linked_worktree_path().is_none(),
            "submodule should not be detected as a linked worktree"
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        // Disk contains "C" while HEAD/index contain "c" → modified in worktree.
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
11580
/// Verifies that when two project worktrees (`child1`, `child2`) live inside
/// the same git repository, the repository is reported only once rather than
/// once per worktree.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open two sibling directories of the same repo as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Only a single repository entry should exist, rooted at the shared parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
11627
/// Verifies that saving a buffer under a new path (save-as) re-bases its open
/// diffs: the unstaged diff should switch to the new path's staged content and
/// the uncommitted diff to the new path's committed content, driven by the
/// `BufferChangedFilePath` event.
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct contents per file/layer so each assertion below pins down
    // exactly which base text the diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Dirty the buffer so the save below actually writes new content.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        // The buffer ("buffer") differs from file_2's staged text, so at
        // least one hunk must exist.
        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11741
11742async fn search(
11743 project: &Entity<Project>,
11744 query: SearchQuery,
11745 cx: &mut gpui::TestAppContext,
11746) -> Result<HashMap<String, Vec<Range<usize>>>> {
11747 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11748 let mut results = HashMap::default();
11749 while let Ok(search_result) = search_rx.rx.recv().await {
11750 match search_result {
11751 SearchResult::Buffer { buffer, ranges } => {
11752 results.entry(buffer).or_insert(ranges);
11753 }
11754 SearchResult::LimitReached => {}
11755 }
11756 }
11757 Ok(results
11758 .into_iter()
11759 .map(|(buffer, ranges)| {
11760 buffer.update(cx, |buffer, cx| {
11761 let path = buffer
11762 .file()
11763 .unwrap()
11764 .full_path(cx)
11765 .to_string_lossy()
11766 .to_string();
11767 let ranges = ranges
11768 .into_iter()
11769 .map(|range| range.to_offset(buffer))
11770 .collect::<Vec<_>>();
11771 (path, ranges)
11772 })
11773 })
11774 .collect())
11775}
11776
/// Verifies that reloading a buffer with a different encoding is undoable and
/// redoable, and that none of those transitions mark the buffer dirty (each
/// state corresponds to content loaded from disk).
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    // Baseline: detected as UTF-8, text "Hi", clean.
    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    // Bytes 0x48 0x69 read little-endian form the single code unit 0x6948.
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    // Undo restores both the text and the original encoding.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    // Redo returns to the UTF-16LE interpretation, still clean.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11840
/// Verifies that `wait_for_initial_scan` resolves only after worktree scanning
/// is done, and that by that point both repositories have been created and
/// registered in the `GitStore`.
#[gpui::test]
async fn test_initial_scan_complete(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // Two independent roots, each with its own repo and task config.
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                ".zed": {
                    "tasks.json": r#"[{"label": "task-a", "command": "echo a"}]"#
                },
                "src": { "main.rs": "" }
            },
            "b": {
                ".git": {},
                ".zed": {
                    "tasks.json": r#"[{"label": "task-b", "command": "echo b"}]"#
                },
                "src": { "lib.rs": "" }
            },
        }),
    )
    .await;

    // Record every Repository entity created, via a global observer. The
    // subscription must be registered before the project is constructed.
    let repos_created = Rc::new(RefCell::new(Vec::new()));
    let _observe = {
        let repos_created = repos_created.clone();
        cx.update(|cx| {
            cx.observe_new::<Repository>(move |repo, _, cx| {
                repos_created.borrow_mut().push(cx.entity().downgrade());
                // Silence the unused `repo` parameter.
                let _ = repo;
            })
        })
    };

    let project = Project::test(
        fs.clone(),
        [path!("/root/a").as_ref(), path!("/root/b").as_ref()],
        cx,
    )
    .await;

    let scan_complete = project.read_with(cx, |project, cx| project.wait_for_initial_scan(cx));
    scan_complete.await;

    project.read_with(cx, |project, cx| {
        assert!(
            project.worktree_store().read(cx).initial_scan_completed(),
            "Expected initial scan to be completed after awaiting wait_for_initial_scan"
        );
    });

    // One repository per root should have been instantiated during the scan.
    let created_repos_len = repos_created.borrow().len();
    assert_eq!(
        created_repos_len, 2,
        "Expected 2 repositories to be created during scan, got {}",
        created_repos_len
    );

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repositories().len(),
            2,
            "Expected 2 repositories in GitStore"
        );
    });
}
11911
11912pub fn init_test(cx: &mut gpui::TestAppContext) {
11913 zlog::init_test();
11914
11915 cx.update(|cx| {
11916 let settings_store = SettingsStore::test(cx);
11917 cx.set_global(settings_store);
11918 release_channel::init(semver::Version::new(0, 0, 0), cx);
11919 });
11920}
11921
11922fn json_lang() -> Arc<Language> {
11923 Arc::new(Language::new(
11924 LanguageConfig {
11925 name: "JSON".into(),
11926 matcher: LanguageMatcher {
11927 path_suffixes: vec!["json".to_string()],
11928 ..Default::default()
11929 },
11930 ..Default::default()
11931 },
11932 None,
11933 ))
11934}
11935
11936fn js_lang() -> Arc<Language> {
11937 Arc::new(Language::new(
11938 LanguageConfig {
11939 name: "JavaScript".into(),
11940 matcher: LanguageMatcher {
11941 path_suffixes: vec!["js".to_string()],
11942 ..Default::default()
11943 },
11944 ..Default::default()
11945 },
11946 None,
11947 ))
11948}
11949
/// Builds a test "Python" language (no parser) whose toolchain lister reports
/// a `.venv` virtual environment for every ancestor directory of the queried
/// path that contains one on the given fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Fake lister: scans ancestors for `.venv` directories instead of
    // inspecting a real interpreter.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is intentionally unimplemented for this fake.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed in tests.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
12022
12023fn typescript_lang() -> Arc<Language> {
12024 Arc::new(Language::new(
12025 LanguageConfig {
12026 name: "TypeScript".into(),
12027 matcher: LanguageMatcher {
12028 path_suffixes: vec!["ts".to_string()],
12029 ..Default::default()
12030 },
12031 ..Default::default()
12032 },
12033 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
12034 ))
12035}
12036
12037fn tsx_lang() -> Arc<Language> {
12038 Arc::new(Language::new(
12039 LanguageConfig {
12040 name: "tsx".into(),
12041 matcher: LanguageMatcher {
12042 path_suffixes: vec!["tsx".to_string()],
12043 ..Default::default()
12044 },
12045 ..Default::default()
12046 },
12047 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
12048 ))
12049}
12050
12051fn get_all_tasks(
12052 project: &Entity<Project>,
12053 task_contexts: Arc<TaskContexts>,
12054 cx: &mut App,
12055) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
12056 let new_tasks = project.update(cx, |project, cx| {
12057 project.task_store().update(cx, |task_store, cx| {
12058 task_store.task_inventory().unwrap().update(cx, |this, cx| {
12059 this.used_and_current_resolved_tasks(task_contexts, cx)
12060 })
12061 })
12062 });
12063
12064 cx.background_spawn(async move {
12065 let (mut old, new) = new_tasks.await;
12066 old.extend(new);
12067 old
12068 })
12069}
12070
12071#[track_caller]
12072fn assert_entry_git_state(
12073 tree: &Worktree,
12074 repository: &Repository,
12075 path: &str,
12076 index_status: Option<StatusCode>,
12077 is_ignored: bool,
12078) {
12079 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
12080 let entry = tree
12081 .entry_for_path(&rel_path(path))
12082 .unwrap_or_else(|| panic!("entry {path} not found"));
12083 let status = repository
12084 .status_for_path(&repo_path(path))
12085 .map(|entry| entry.status);
12086 let expected = index_status.map(|index_status| {
12087 TrackedStatus {
12088 index_status,
12089 worktree_status: StatusCode::Unmodified,
12090 }
12091 .into()
12092 });
12093 assert_eq!(
12094 status, expected,
12095 "expected {path} to have git status: {expected:?}"
12096 );
12097 assert_eq!(
12098 entry.is_ignored, is_ignored,
12099 "expected {path} to have is_ignored: {is_ignored}"
12100 );
12101}
12102
12103#[track_caller]
12104fn git_init(path: &Path) -> git2::Repository {
12105 let mut init_opts = RepositoryInitOptions::new();
12106 init_opts.initial_head("main");
12107 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
12108}
12109
12110#[track_caller]
12111fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
12112 let path = path.as_ref();
12113 let mut index = repo.index().expect("Failed to get index");
12114 index.add_path(path).expect("Failed to add file");
12115 index.write().expect("Failed to write index");
12116}
12117
12118#[track_caller]
12119fn git_remove_index(path: &Path, repo: &git2::Repository) {
12120 let mut index = repo.index().expect("Failed to get index");
12121 index.remove_path(path).expect("Failed to add file");
12122 index.write().expect("Failed to write index");
12123}
12124
12125#[track_caller]
12126fn git_commit(msg: &'static str, repo: &git2::Repository) {
12127 use git2::Signature;
12128
12129 let signature = Signature::now("test", "test@zed.dev").unwrap();
12130 let oid = repo.index().unwrap().write_tree().unwrap();
12131 let tree = repo.find_tree(oid).unwrap();
12132 if let Ok(head) = repo.head() {
12133 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
12134
12135 let parent_commit = parent_obj.as_commit().unwrap();
12136
12137 repo.commit(
12138 Some("HEAD"),
12139 &signature,
12140 &signature,
12141 msg,
12142 &tree,
12143 &[parent_commit],
12144 )
12145 .expect("Failed to commit with parent");
12146 } else {
12147 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
12148 .expect("Failed to commit");
12149 }
12150}
12151
// Currently compiled out via `#[cfg(any())]`; kept around for future tests.
/// Cherry-picks `commit` onto the current HEAD.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
12157
12158#[track_caller]
12159fn git_stash(repo: &mut git2::Repository) {
12160 use git2::Signature;
12161
12162 let signature = Signature::now("test", "test@zed.dev").unwrap();
12163 repo.stash_save(&signature, "N/A", None)
12164 .expect("Failed to stash");
12165}
12166
12167#[track_caller]
12168fn git_reset(offset: usize, repo: &git2::Repository) {
12169 let head = repo.head().expect("Couldn't get repo head");
12170 let object = head.peel(git2::ObjectType::Commit).unwrap();
12171 let commit = object.as_commit().unwrap();
12172 let new_head = commit
12173 .parents()
12174 .inspect(|parnet| {
12175 parnet.message();
12176 })
12177 .nth(offset)
12178 .expect("Not enough history");
12179 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
12180 .expect("Could not reset");
12181}
12182
// Currently compiled out via `#[cfg(any())]`; kept around for future tests.
/// Creates branch `name` pointing at the current HEAD commit.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed: the expect message previously said "Failed to commit",
    // copy-pasted from `git_commit`.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
12193
// Currently compiled out via `#[cfg(any())]`; kept around for future tests.
/// Points HEAD at `name` and checks it out.
/// NOTE(review): `set_head` presumably expects a full refname like
/// "refs/heads/main" — confirm against git2's `set_head` docs before reviving.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
12200
// Currently compiled out via `#[cfg(any())]`; kept around for future tests.
/// Collects the repository's status entries into a path → status map.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
12210
/// Verifies `Project::find_project_path` resolution for absolute paths:
/// existing files in either worktree, nested files, nonexistent files inside a
/// worktree (still resolvable), and paths outside every worktree (None).
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two sibling worktrees, so resolution must pick the right one per path.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at the root of the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // A nested file resolves to a worktree-relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // A file in the second worktree resolves to that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // Resolution doesn't require the file to exist on disk.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
12294
/// Verifies repository bookkeeping as worktrees are removed: removing a
/// worktree nested inside a repo's directory keeps that repo alive (it is
/// still covered by another worktree), and the active repository falls over to
/// a remaining one — or to `None` once all repo-bearing worktrees are gone.
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Three worktrees: /root/b/script is nested inside repo b's directory.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Two repos (a and b) despite three worktrees — script shares repo b.
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the nested worktree must not drop repo b.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing worktree a should switch the active repository to b.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // With every repo-bearing worktree gone, no repository remains active.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
12407
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    // Verifies the optimistic-UI path for staging: while a `stage_entries` job
    // is still in flight, the affected hunk must report
    // `SecondaryHunkRemovalPending` instead of jumping straight from unstaged
    // (`HasSecondaryHunk`) to staged (`NoSecondaryHunk`).
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD/index contain "two"; the working copy has "TWO" — one modified hunk.
    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // Seed both HEAD and the index with the committed contents, so the
    // working-copy edit starts out unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    // NOTE(review): if the pending state is never observed within 10 ticks,
    // the loop falls through and the assert_hunks below fails with a generic
    // hunk mismatch instead of a dedicated message — confirm 10 ticks is
    // always enough headroom for the transition.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            // The job hasn't marked the hunk yet; keep ticking.
            HasSecondaryHunk => {}
            // Optimistic pending state observed — stop ticking so the
            // operation doesn't complete before we assert on it.
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // While the stage job is in flight, the hunk must be in the pending state.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
12552
12553#[gpui::test]
12554async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
12555 init_test(cx);
12556
12557 // Configure read_only_files setting
12558 cx.update(|cx| {
12559 cx.update_global::<SettingsStore, _>(|store, cx| {
12560 store.update_user_settings(cx, |settings| {
12561 settings.project.worktree.read_only_files = Some(vec![
12562 "**/generated/**".to_string(),
12563 "**/*.gen.rs".to_string(),
12564 ]);
12565 });
12566 });
12567 });
12568
12569 let fs = FakeFs::new(cx.background_executor.clone());
12570 fs.insert_tree(
12571 path!("/root"),
12572 json!({
12573 "src": {
12574 "main.rs": "fn main() {}",
12575 "types.gen.rs": "// Generated file",
12576 },
12577 "generated": {
12578 "schema.rs": "// Auto-generated schema",
12579 }
12580 }),
12581 )
12582 .await;
12583
12584 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12585
12586 // Open a regular file - should be read-write
12587 let regular_buffer = project
12588 .update(cx, |project, cx| {
12589 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12590 })
12591 .await
12592 .unwrap();
12593
12594 regular_buffer.read_with(cx, |buffer, _| {
12595 assert!(!buffer.read_only(), "Regular file should not be read-only");
12596 });
12597
12598 // Open a file matching *.gen.rs pattern - should be read-only
12599 let gen_buffer = project
12600 .update(cx, |project, cx| {
12601 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
12602 })
12603 .await
12604 .unwrap();
12605
12606 gen_buffer.read_with(cx, |buffer, _| {
12607 assert!(
12608 buffer.read_only(),
12609 "File matching *.gen.rs pattern should be read-only"
12610 );
12611 });
12612
12613 // Open a file in generated directory - should be read-only
12614 let generated_buffer = project
12615 .update(cx, |project, cx| {
12616 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12617 })
12618 .await
12619 .unwrap();
12620
12621 generated_buffer.read_with(cx, |buffer, _| {
12622 assert!(
12623 buffer.read_only(),
12624 "File in generated directory should be read-only"
12625 );
12626 });
12627}
12628
12629#[gpui::test]
12630async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
12631 init_test(cx);
12632
12633 // Explicitly set read_only_files to empty (default behavior)
12634 cx.update(|cx| {
12635 cx.update_global::<SettingsStore, _>(|store, cx| {
12636 store.update_user_settings(cx, |settings| {
12637 settings.project.worktree.read_only_files = Some(vec![]);
12638 });
12639 });
12640 });
12641
12642 let fs = FakeFs::new(cx.background_executor.clone());
12643 fs.insert_tree(
12644 path!("/root"),
12645 json!({
12646 "src": {
12647 "main.rs": "fn main() {}",
12648 },
12649 "generated": {
12650 "schema.rs": "// Auto-generated schema",
12651 }
12652 }),
12653 )
12654 .await;
12655
12656 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12657
12658 // All files should be read-write when read_only_files is empty
12659 let main_buffer = project
12660 .update(cx, |project, cx| {
12661 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12662 })
12663 .await
12664 .unwrap();
12665
12666 main_buffer.read_with(cx, |buffer, _| {
12667 assert!(
12668 !buffer.read_only(),
12669 "Files should not be read-only when read_only_files is empty"
12670 );
12671 });
12672
12673 let generated_buffer = project
12674 .update(cx, |project, cx| {
12675 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12676 })
12677 .await
12678 .unwrap();
12679
12680 generated_buffer.read_with(cx, |buffer, _| {
12681 assert!(
12682 !buffer.read_only(),
12683 "Generated files should not be read-only when read_only_files is empty"
12684 );
12685 });
12686}
12687
12688#[gpui::test]
12689async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
12690 init_test(cx);
12691
12692 // Configure to make lock files read-only
12693 cx.update(|cx| {
12694 cx.update_global::<SettingsStore, _>(|store, cx| {
12695 store.update_user_settings(cx, |settings| {
12696 settings.project.worktree.read_only_files = Some(vec![
12697 "**/*.lock".to_string(),
12698 "**/package-lock.json".to_string(),
12699 ]);
12700 });
12701 });
12702 });
12703
12704 let fs = FakeFs::new(cx.background_executor.clone());
12705 fs.insert_tree(
12706 path!("/root"),
12707 json!({
12708 "Cargo.lock": "# Lock file",
12709 "Cargo.toml": "[package]",
12710 "package-lock.json": "{}",
12711 "package.json": "{}",
12712 }),
12713 )
12714 .await;
12715
12716 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12717
12718 // Cargo.lock should be read-only
12719 let cargo_lock = project
12720 .update(cx, |project, cx| {
12721 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
12722 })
12723 .await
12724 .unwrap();
12725
12726 cargo_lock.read_with(cx, |buffer, _| {
12727 assert!(buffer.read_only(), "Cargo.lock should be read-only");
12728 });
12729
12730 // Cargo.toml should be read-write
12731 let cargo_toml = project
12732 .update(cx, |project, cx| {
12733 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
12734 })
12735 .await
12736 .unwrap();
12737
12738 cargo_toml.read_with(cx, |buffer, _| {
12739 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
12740 });
12741
12742 // package-lock.json should be read-only
12743 let package_lock = project
12744 .update(cx, |project, cx| {
12745 project.open_local_buffer(path!("/root/package-lock.json"), cx)
12746 })
12747 .await
12748 .unwrap();
12749
12750 package_lock.read_with(cx, |buffer, _| {
12751 assert!(buffer.read_only(), "package-lock.json should be read-only");
12752 });
12753
12754 // package.json should be read-write
12755 let package_json = project
12756 .update(cx, |project, cx| {
12757 project.open_local_buffer(path!("/root/package.json"), cx)
12758 })
12759 .await
12760 .unwrap();
12761
12762 package_json.read_with(cx, |buffer, _| {
12763 assert!(!buffer.read_only(), "package.json should not be read-only");
12764 });
12765}
12766
12767mod disable_ai_settings_tests {
12768 use gpui::TestAppContext;
12769 use project::*;
12770 use settings::{Settings, SettingsStore};
12771
12772 #[gpui::test]
12773 async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
12774 cx.update(|cx| {
12775 settings::init(cx);
12776
12777 // Test 1: Default is false (AI enabled)
12778 assert!(
12779 !DisableAiSettings::get_global(cx).disable_ai,
12780 "Default should allow AI"
12781 );
12782 });
12783
12784 let disable_true = serde_json::json!({
12785 "disable_ai": true
12786 })
12787 .to_string();
12788 let disable_false = serde_json::json!({
12789 "disable_ai": false
12790 })
12791 .to_string();
12792
12793 cx.update_global::<SettingsStore, _>(|store, cx| {
12794 store.set_user_settings(&disable_false, cx).unwrap();
12795 store.set_global_settings(&disable_true, cx).unwrap();
12796 });
12797 cx.update(|cx| {
12798 assert!(
12799 DisableAiSettings::get_global(cx).disable_ai,
12800 "Local false cannot override global true"
12801 );
12802 });
12803
12804 cx.update_global::<SettingsStore, _>(|store, cx| {
12805 store.set_global_settings(&disable_false, cx).unwrap();
12806 store.set_user_settings(&disable_true, cx).unwrap();
12807 });
12808
12809 cx.update(|cx| {
12810 assert!(
12811 DisableAiSettings::get_global(cx).disable_ai,
12812 "Local false cannot override global true"
12813 );
12814 });
12815 }
12816
12817 #[gpui::test]
12818 async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
12819 use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
12820 use worktree::WorktreeId;
12821
12822 cx.update(|cx| {
12823 settings::init(cx);
12824
12825 // Default should allow AI
12826 assert!(
12827 !DisableAiSettings::get_global(cx).disable_ai,
12828 "Default should allow AI"
12829 );
12830 });
12831
12832 let worktree_id = WorktreeId::from_usize(1);
12833 let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
12834 std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
12835 };
12836 let project_path = rel_path("project");
12837 let settings_location = SettingsLocation {
12838 worktree_id,
12839 path: project_path.as_ref(),
12840 };
12841
12842 // Test: Project-level disable_ai=true should disable AI for files in that project
12843 cx.update_global::<SettingsStore, _>(|store, cx| {
12844 store
12845 .set_local_settings(
12846 worktree_id,
12847 LocalSettingsPath::InWorktree(project_path.clone()),
12848 LocalSettingsKind::Settings,
12849 Some(r#"{ "disable_ai": true }"#),
12850 cx,
12851 )
12852 .unwrap();
12853 });
12854
12855 cx.update(|cx| {
12856 let settings = DisableAiSettings::get(Some(settings_location), cx);
12857 assert!(
12858 settings.disable_ai,
12859 "Project-level disable_ai=true should disable AI for files in that project"
12860 );
12861 // Global should now also be true since project-level disable_ai is merged into global
12862 assert!(
12863 DisableAiSettings::get_global(cx).disable_ai,
12864 "Global setting should be affected by project-level disable_ai=true"
12865 );
12866 });
12867
12868 // Test: Setting project-level to false should allow AI for that project
12869 cx.update_global::<SettingsStore, _>(|store, cx| {
12870 store
12871 .set_local_settings(
12872 worktree_id,
12873 LocalSettingsPath::InWorktree(project_path.clone()),
12874 LocalSettingsKind::Settings,
12875 Some(r#"{ "disable_ai": false }"#),
12876 cx,
12877 )
12878 .unwrap();
12879 });
12880
12881 cx.update(|cx| {
12882 let settings = DisableAiSettings::get(Some(settings_location), cx);
12883 assert!(
12884 !settings.disable_ai,
12885 "Project-level disable_ai=false should allow AI"
12886 );
12887 // Global should also be false now
12888 assert!(
12889 !DisableAiSettings::get_global(cx).disable_ai,
12890 "Global setting should be false when project-level is false"
12891 );
12892 });
12893
12894 // Test: User-level true + project-level false = AI disabled (saturation)
12895 let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
12896 cx.update_global::<SettingsStore, _>(|store, cx| {
12897 store.set_user_settings(&disable_true, cx).unwrap();
12898 store
12899 .set_local_settings(
12900 worktree_id,
12901 LocalSettingsPath::InWorktree(project_path.clone()),
12902 LocalSettingsKind::Settings,
12903 Some(r#"{ "disable_ai": false }"#),
12904 cx,
12905 )
12906 .unwrap();
12907 });
12908
12909 cx.update(|cx| {
12910 let settings = DisableAiSettings::get(Some(settings_location), cx);
12911 assert!(
12912 settings.disable_ai,
12913 "Project-level false cannot override user-level true (SaturatingBool)"
12914 );
12915 });
12916 }
12917}