1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::{FakeFs, PathEventKind};
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 TestAppContext, UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
45 LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
46 ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
47 language_settings::{LanguageSettings, LanguageSettingsContent},
48 markdown_lang, rust_lang, tree_sitter_typescript,
49};
50use lsp::{
51 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
52 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
53 Uri, WillRenameFiles, notification::DidRenameFiles,
54};
55use parking_lot::Mutex;
56use paths::{config_dir, global_gitignore_path, tasks_file};
57use postage::stream::Stream as _;
58use pretty_assertions::{assert_eq, assert_matches};
59use project::{
60 Event, TaskContexts,
61 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
62 search::{SearchQuery, SearchResult},
63 task_store::{TaskSettingsLocation, TaskStore},
64 *,
65};
66use rand::{Rng as _, rngs::StdRng};
67use serde_json::json;
68use settings::SettingsStore;
69#[cfg(not(windows))]
70use std::os;
71use std::{
72 cell::RefCell,
73 env, mem,
74 num::NonZeroU32,
75 ops::Range,
76 path::{Path, PathBuf},
77 rc::Rc,
78 str::FromStr,
79 sync::{Arc, OnceLock, atomic},
80 task::Poll,
81 time::Duration,
82};
83use sum_tree::SumTree;
84use task::{ResolvedTask, ShellKind, TaskContext};
85use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
86use unindent::Unindent as _;
87use util::{
88 TryFutureExt as _, assert_set_eq, maybe, path,
89 paths::{PathMatcher, PathStyle},
90 rel_path::{RelPath, rel_path},
91 test::{TempTree, marked_text_offsets},
92 uri,
93};
94use worktree::WorktreeModelHandle as _;
95
96#[gpui::test]
97async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
98 cx.executor().allow_parking();
99
100 let (tx, mut rx) = futures::channel::mpsc::unbounded();
101 let _thread = std::thread::spawn(move || {
102 #[cfg(not(target_os = "windows"))]
103 std::fs::metadata("/tmp").unwrap();
104 #[cfg(target_os = "windows")]
105 std::fs::metadata("C:/Windows").unwrap();
106 std::thread::sleep(Duration::from_millis(1000));
107 tx.unbounded_send(1).unwrap();
108 });
109 rx.next().await.unwrap();
110}
111
112#[gpui::test]
113async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
114 cx.executor().allow_parking();
115
116 let io_task = smol::unblock(move || {
117 println!("sleeping on thread {:?}", std::thread::current().id());
118 std::thread::sleep(Duration::from_millis(10));
119 1
120 });
121
122 let task = cx.foreground_executor().spawn(async move {
123 io_task.await;
124 });
125
126 task.await;
127}
128
129#[gpui::test]
130async fn test_default_session_work_dirs_prefers_directory_worktrees_over_single_file_parents(
131 cx: &mut gpui::TestAppContext,
132) {
133 init_test(cx);
134
135 let fs = FakeFs::new(cx.executor());
136 fs.insert_tree(
137 path!("/root"),
138 json!({
139 "dir-project": {
140 "src": {
141 "main.rs": "fn main() {}"
142 }
143 },
144 "single-file.rs": "fn helper() {}"
145 }),
146 )
147 .await;
148
149 let project = Project::test(
150 fs,
151 [
152 Path::new(path!("/root/single-file.rs")),
153 Path::new(path!("/root/dir-project")),
154 ],
155 cx,
156 )
157 .await;
158
159 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
160 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
161
162 assert_eq!(
163 ordered_paths,
164 vec![
165 PathBuf::from(path!("/root/dir-project")),
166 PathBuf::from(path!("/root")),
167 ]
168 );
169}
170
171#[gpui::test]
172async fn test_default_session_work_dirs_falls_back_to_home_for_empty_project(
173 cx: &mut gpui::TestAppContext,
174) {
175 init_test(cx);
176
177 let fs = FakeFs::new(cx.executor());
178 let project = Project::test(fs, [], cx).await;
179
180 let work_dirs = project.read_with(cx, |project, cx| project.default_path_list(cx));
181 let ordered_paths = work_dirs.ordered_paths().cloned().collect::<Vec<_>>();
182
183 assert_eq!(ordered_paths, vec![paths::home_dir().to_path_buf()]);
184}
185
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are opt-in for the user,
// and thus we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem (TempTree + RealFs), so blocking is expected.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // Symlink the worktree root itself, and a directory inside the tree.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // 4 regular files (apple, date, endive, fennel/grape) plus
        // finnochio/grape reached through the directory symlink.
        assert_eq!(tree.file_count(), 5);
        // The symlinked path resolves to the same underlying file (same inode).
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
236
237#[gpui::test]
238async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
239 init_test(cx);
240
241 let dir = TempTree::new(json!({
242 ".editorconfig": r#"
243 root = true
244 [*.rs]
245 indent_style = tab
246 indent_size = 3
247 end_of_line = lf
248 insert_final_newline = true
249 trim_trailing_whitespace = true
250 max_line_length = 120
251 [*.js]
252 tab_width = 10
253 max_line_length = off
254 "#,
255 ".zed": {
256 "settings.json": r#"{
257 "tab_size": 8,
258 "hard_tabs": false,
259 "ensure_final_newline_on_save": false,
260 "remove_trailing_whitespace_on_save": false,
261 "preferred_line_length": 64,
262 "soft_wrap": "editor_width",
263 }"#,
264 },
265 "a.rs": "fn a() {\n A\n}",
266 "b": {
267 ".editorconfig": r#"
268 [*.rs]
269 indent_size = 2
270 max_line_length = off,
271 "#,
272 "b.rs": "fn b() {\n B\n}",
273 },
274 "c.js": "def c\n C\nend",
275 "d": {
276 ".editorconfig": r#"
277 [*.rs]
278 indent_size = 1
279 "#,
280 "d.rs": "fn d() {\n D\n}",
281 },
282 "README.json": "tabs are better\n",
283 }));
284
285 let path = dir.path();
286 let fs = FakeFs::new(cx.executor());
287 fs.insert_tree_from_real_fs(path, path).await;
288 let project = Project::test(fs, [path], cx).await;
289
290 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
291 language_registry.add(js_lang());
292 language_registry.add(json_lang());
293 language_registry.add(rust_lang());
294
295 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
296
297 cx.executor().run_until_parked();
298
299 let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
300 let buffer = project
301 .update(cx, |project, cx| {
302 project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
303 })
304 .await
305 .unwrap();
306 cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
307 };
308
309 let settings_a = settings_for("a.rs", cx).await;
310 let settings_b = settings_for("b/b.rs", cx).await;
311 let settings_c = settings_for("c.js", cx).await;
312 let settings_d = settings_for("d/d.rs", cx).await;
313 let settings_readme = settings_for("README.json", cx).await;
314 // .editorconfig overrides .zed/settings
315 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
316 assert_eq!(settings_a.hard_tabs, true);
317 assert_eq!(settings_a.ensure_final_newline_on_save, true);
318 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
319 assert_eq!(settings_a.preferred_line_length, 120);
320
321 // .editorconfig in b/ overrides .editorconfig in root
322 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
323
324 // .editorconfig in subdirectory overrides .editorconfig in root
325 assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));
326
327 // "indent_size" is not set, so "tab_width" is used
328 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
329
330 // When max_line_length is "off", default to .zed/settings.json
331 assert_eq!(settings_b.preferred_line_length, 64);
332 assert_eq!(settings_c.preferred_line_length, 64);
333
334 // README.md should not be affected by .editorconfig's globe "*.rs"
335 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
336}
337
#[gpui::test]
async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The worktree sits two levels below the fs root; both ancestors carry
    // .editorconfig files that are *external* to the worktree.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "parent": {
                ".editorconfig": "[*.rs]\nindent_size = 2\n",
                "worktree": {
                    ".editorconfig": "[*.md]\nindent_size = 3\n",
                    "main.rs": "fn main() {}",
                    "README.md": "# README",
                    "other.txt": "other content",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    language_registry.add(markdown_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    // Opens `path` in the worktree and returns its resolved language settings.
    let settings_for = async |path: &str, cx: &mut TestAppContext| -> LanguageSettings {
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree.read(cx).id(), rel_path(path)), cx)
            })
            .await
            .unwrap();
        cx.update(|cx| LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned())
    };

    let settings_rs = settings_for("main.rs", cx).await;
    let settings_md = settings_for("README.md", cx).await;
    let settings_txt = settings_for("other.txt", cx).await;

    // main.rs gets indent_size = 2 from parent's external .editorconfig
    assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));

    // README.md gets indent_size = 3 from internal worktree .editorconfig
    assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));

    // other.txt gets indent_size = 4 from grandparent's external .editorconfig
    assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
}
392
393#[gpui::test]
394async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
395 init_test(cx);
396
397 let fs = FakeFs::new(cx.executor());
398 fs.insert_tree(
399 path!("/worktree"),
400 json!({
401 ".editorconfig": "[*]\nindent_size = 99\n",
402 "src": {
403 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
404 "file.rs": "fn main() {}",
405 }
406 }),
407 )
408 .await;
409
410 let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;
411
412 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
413 language_registry.add(rust_lang());
414
415 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
416
417 cx.executor().run_until_parked();
418
419 let buffer = project
420 .update(cx, |project, cx| {
421 project.open_buffer((worktree.read(cx).id(), rel_path("src/file.rs")), cx)
422 })
423 .await
424 .unwrap();
425 cx.update(|cx| {
426 let settings = LanguageSettings::for_buffer(buffer.read(cx), cx).into_owned();
427 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
428 });
429}
430
431#[gpui::test]
432async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
433 init_test(cx);
434
435 let fs = FakeFs::new(cx.executor());
436 fs.insert_tree(
437 path!("/parent"),
438 json!({
439 ".editorconfig": "[*]\nindent_size = 99\n",
440 "worktree": {
441 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
442 "file.rs": "fn main() {}",
443 }
444 }),
445 )
446 .await;
447
448 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
449
450 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
451 language_registry.add(rust_lang());
452
453 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
454
455 cx.executor().run_until_parked();
456
457 let buffer = project
458 .update(cx, |project, cx| {
459 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
460 })
461 .await
462 .unwrap();
463
464 cx.update(|cx| {
465 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
466
467 // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
468 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
469 });
470}
471
#[gpui::test]
async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The parent's external config declares `root = true`, so traversal must
    // stop there and never reach the grandparent's config.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "parent": {
                ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
                "worktree": {
                    "file.rs": "fn main() {}",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
514
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two sibling worktrees share one external .editorconfig in their parent.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    // Open both worktrees in the same project.
    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let worktrees: Vec<_> = cx.update(|cx| project.read(cx).worktrees(cx).collect());
    assert_eq!(worktrees.len(), 2);

    for worktree in worktrees {
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
            })
            .await
            .unwrap();

        cx.update(|cx| {
            let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        });
    }
}
570
571#[gpui::test]
572async fn test_external_editorconfig_not_loaded_without_internal_config(
573 cx: &mut gpui::TestAppContext,
574) {
575 init_test(cx);
576
577 let fs = FakeFs::new(cx.executor());
578 fs.insert_tree(
579 path!("/parent"),
580 json!({
581 ".editorconfig": "[*]\nindent_size = 99\n",
582 "worktree": {
583 "file.rs": "fn main() {}",
584 }
585 }),
586 )
587 .await;
588
589 let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;
590
591 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
592 language_registry.add(rust_lang());
593
594 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
595
596 cx.executor().run_until_parked();
597
598 let buffer = project
599 .update(cx, |project, cx| {
600 project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
601 })
602 .await
603 .unwrap();
604
605 cx.update(|cx| {
606 let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);
607
608 // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
609 // because without an internal .editorconfig, external configs are not loaded
610 assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
611 });
612}
613
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The worktree's empty internal config causes the external parent config
    // (indent_size = 4) to be loaded and watched.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Rewrite the *external* config on disk; the watcher should pick it up.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
677
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two sibling worktrees under a parent that owns the effective
    // .editorconfig (root = true, indent_size = 7).
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    // Start with only one worktree open.
    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Add the second worktree after the project is already running.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((new_worktree.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
744
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    // Inspect the editorconfig store's internal state via its test hook.
    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
800
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    // Two worktrees share a single external config; removing one worktree
    // must not drop state still needed by the other.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    let buffer = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree_b.read(cx).id(), rel_path("file.rs")), cx)
        })
        .await
        .unwrap();

    cx.update(|cx| {
        let settings = LanguageSettings::for_buffer(&buffer.read(cx), cx);

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
893
894#[gpui::test]
895async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
896 init_test(cx);
897 cx.update(|cx| {
898 GitHostingProviderRegistry::default_global(cx);
899 git_hosting_providers::init(cx);
900 });
901
902 let fs = FakeFs::new(cx.executor());
903 let str_path = path!("/dir");
904 let path = Path::new(str_path);
905
906 fs.insert_tree(
907 path!("/dir"),
908 json!({
909 ".zed": {
910 "settings.json": r#"{
911 "git_hosting_providers": [
912 {
913 "provider": "gitlab",
914 "base_url": "https://google.com",
915 "name": "foo"
916 }
917 ]
918 }"#
919 },
920 }),
921 )
922 .await;
923
924 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
925 let (_worktree, _) =
926 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
927 cx.executor().run_until_parked();
928
929 cx.update(|cx| {
930 let provider = GitHostingProviderRegistry::global(cx);
931 assert!(
932 provider
933 .list_hosting_providers()
934 .into_iter()
935 .any(|provider| provider.name() == "foo")
936 );
937 });
938
939 fs.atomic_write(
940 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
941 "{}".into(),
942 )
943 .await
944 .unwrap();
945
946 cx.run_until_parked();
947
948 cx.update(|cx| {
949 let provider = GitHostingProviderRegistry::global(cx);
950 assert!(
951 !provider
952 .list_hosting_providers()
953 .into_iter()
954 .any(|provider| provider.name() == "foo")
955 );
956 });
957}
958
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Worktree layout: settings/tasks at the worktree root (.zed) plus a
    // nested override in b/.zed.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against this worktree as the active context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for tasks coming from the worktree-root .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("a/a.rs")), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_buffer((worktree.read(cx).id(), rel_path("b/b.rs")), cx)
        })
        .await
        .unwrap();
    // Settings: a/ inherits the root .zed settings; b/ is overridden locally.
    cx.update(|cx| {
        let settings_a = LanguageSettings::for_buffer(&buffer_a.read(cx), cx);
        let settings_b = LanguageSettings::for_buffer(&buffer_b.read(cx), cx);

        assert_eq!(settings_a.tab_size.get(), 8);
        assert_eq!(settings_b.tab_size.get(), 2);
    });

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree tasks.json files are picked up; the nested one is listed
    // first at this point.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as scheduled, and register a global tasks.json entry
    // (with an env var) via the task inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store()
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // After scheduling, the root task is listed first, and the newly added
    // global task (with its env) appears last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
1161
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we had a chance to setup the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        // The toast is expected to name the offending variable and to carry a
        // link to the tasks documentation.
        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1219
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // A worktree-local task whose command depends on the `ZED_WORKTREE_ROOT`
    // task variable being substituted from the active worktree context.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // With no active worktree context there is no `ZED_WORKTREE_ROOT` value,
    // so the task cannot be resolved at all.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Once the single worktree's context provides `ZED_WORKTREE_ROOT`, the
    // task resolves against it.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
1311
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider that roots Python subprojects at the nearest
    // ancestor directory containing a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walk up at most `depth` ancestors of `path` and return the first
        // directory that contains a `pyproject.toml` file.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two sibling Python subprojects within one worktree, each with its own
    // manifest and virtual environment directory.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain has been activated for project-b yet.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activating a toolchain for project-b should spawn a separate server
    // instance scoped to that subproject.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1513
// End-to-end check of language-server lifecycle management: server startup on
// buffer open, capability-based buffer configuration, change/save/close
// notifications, language reassignment on file rename, and server restart.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed the buffer with a diagnostic, so we can verify below that it is
    // cleared when the buffer changes language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before being replaced.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1916
1917#[gpui::test]
1918async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1919 init_test(cx);
1920
1921 let settings_json_contents = json!({
1922 "languages": {
1923 "Rust": {
1924 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1925 }
1926 },
1927 "lsp": {
1928 "my_fake_lsp": {
1929 "binary": {
1930 // file exists, so this is treated as a relative path
1931 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1932 }
1933 },
1934 "lsp_on_path": {
1935 "binary": {
1936 // file doesn't exist, so it will fall back on PATH env var
1937 "path": path!("lsp_on_path.exe").to_string(),
1938 }
1939 }
1940 },
1941 });
1942
1943 let fs = FakeFs::new(cx.executor());
1944 fs.insert_tree(
1945 path!("/the-root"),
1946 json!({
1947 ".zed": {
1948 "settings.json": settings_json_contents.to_string(),
1949 },
1950 ".relative_path": {
1951 "to": {
1952 "my_fake_lsp.exe": "",
1953 },
1954 },
1955 "src": {
1956 "main.rs": "",
1957 }
1958 }),
1959 )
1960 .await;
1961
1962 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1963 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1964 language_registry.add(rust_lang());
1965
1966 let mut my_fake_lsp = language_registry.register_fake_lsp(
1967 "Rust",
1968 FakeLspAdapter {
1969 name: "my_fake_lsp",
1970 ..Default::default()
1971 },
1972 );
1973 let mut lsp_on_path = language_registry.register_fake_lsp(
1974 "Rust",
1975 FakeLspAdapter {
1976 name: "lsp_on_path",
1977 ..Default::default()
1978 },
1979 );
1980
1981 cx.run_until_parked();
1982
1983 // Start the language server by opening a buffer with a compatible file extension.
1984 project
1985 .update(cx, |project, cx| {
1986 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
1987 })
1988 .await
1989 .unwrap();
1990
1991 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
1992 assert_eq!(
1993 lsp_path.to_string_lossy(),
1994 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
1995 );
1996
1997 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
1998 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
1999}
2000
2001#[gpui::test]
2002async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2003 init_test(cx);
2004
2005 let settings_json_contents = json!({
2006 "languages": {
2007 "Rust": {
2008 "language_servers": ["tilde_lsp"]
2009 }
2010 },
2011 "lsp": {
2012 "tilde_lsp": {
2013 "binary": {
2014 "path": "~/.local/bin/rust-analyzer",
2015 }
2016 }
2017 },
2018 });
2019
2020 let fs = FakeFs::new(cx.executor());
2021 fs.insert_tree(
2022 path!("/root"),
2023 json!({
2024 ".zed": {
2025 "settings.json": settings_json_contents.to_string(),
2026 },
2027 "src": {
2028 "main.rs": "fn main() {}",
2029 }
2030 }),
2031 )
2032 .await;
2033
2034 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2035 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2036 language_registry.add(rust_lang());
2037
2038 let mut tilde_lsp = language_registry.register_fake_lsp(
2039 "Rust",
2040 FakeLspAdapter {
2041 name: "tilde_lsp",
2042 ..Default::default()
2043 },
2044 );
2045 cx.run_until_parked();
2046
2047 project
2048 .update(cx, |project, cx| {
2049 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2050 })
2051 .await
2052 .unwrap();
2053
2054 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2055 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2056 assert_eq!(
2057 lsp_path, expected_path,
2058 "Tilde path should expand to home directory"
2059 );
2060}
2061
#[gpui::test]
async fn test_rescan_fs_change_is_reported_to_language_servers_as_changed(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, _lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a Rust buffer.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Have the server register a watcher for `Cargo.lock`, and record every
    // `workspace/didChangeWatchedFiles` notification it receives.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(
            lsp::RegistrationParams {
                registrations: vec![lsp::Registration {
                    id: Default::default(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: serde_json::to_value(
                        lsp::DidChangeWatchedFilesRegistrationOptions {
                            watchers: vec![lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            }],
                        },
                    )
                    .ok(),
                }],
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
        }
    });

    // No file events should have been delivered yet.
    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);

    // A `Rescan` FS event on a watched path must reach the server as a
    // `CHANGED` file event.
    fs.emit_fs_event(path!("/the-root/Cargo.lock"), Some(PathEventKind::Rescan));
    cx.executor().run_until_parked();

    assert_eq!(
        &*file_changes.lock(),
        &[lsp::FileEvent {
            uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
            typ: lsp::FileChangeType::CHANGED,
        }]
    );
}
2152
2153#[gpui::test]
2154async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2155 init_test(cx);
2156
2157 let fs = FakeFs::new(cx.executor());
2158 fs.insert_tree(
2159 path!("/the-root"),
2160 json!({
2161 ".gitignore": "target\n",
2162 "Cargo.lock": "",
2163 "src": {
2164 "a.rs": "",
2165 "b.rs": "",
2166 },
2167 "target": {
2168 "x": {
2169 "out": {
2170 "x.rs": ""
2171 }
2172 },
2173 "y": {
2174 "out": {
2175 "y.rs": "",
2176 }
2177 },
2178 "z": {
2179 "out": {
2180 "z.rs": ""
2181 }
2182 }
2183 }
2184 }),
2185 )
2186 .await;
2187 fs.insert_tree(
2188 path!("/the-registry"),
2189 json!({
2190 "dep1": {
2191 "src": {
2192 "dep1.rs": "",
2193 }
2194 },
2195 "dep2": {
2196 "src": {
2197 "dep2.rs": "",
2198 }
2199 },
2200 }),
2201 )
2202 .await;
2203 fs.insert_tree(
2204 path!("/the/stdlib"),
2205 json!({
2206 "LICENSE": "",
2207 "src": {
2208 "string.rs": "",
2209 }
2210 }),
2211 )
2212 .await;
2213
2214 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2215 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2216 (project.languages().clone(), project.lsp_store())
2217 });
2218 language_registry.add(rust_lang());
2219 let mut fake_servers = language_registry.register_fake_lsp(
2220 "Rust",
2221 FakeLspAdapter {
2222 name: "the-language-server",
2223 ..Default::default()
2224 },
2225 );
2226
2227 cx.executor().run_until_parked();
2228
2229 // Start the language server by opening a buffer with a compatible file extension.
2230 project
2231 .update(cx, |project, cx| {
2232 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2233 })
2234 .await
2235 .unwrap();
2236
2237 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2238 project.update(cx, |project, cx| {
2239 let worktree = project.worktrees(cx).next().unwrap();
2240 assert_eq!(
2241 worktree
2242 .read(cx)
2243 .snapshot()
2244 .entries(true, 0)
2245 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2246 .collect::<Vec<_>>(),
2247 &[
2248 ("", false),
2249 (".gitignore", false),
2250 ("Cargo.lock", false),
2251 ("src", false),
2252 ("src/a.rs", false),
2253 ("src/b.rs", false),
2254 ("target", true),
2255 ]
2256 );
2257 });
2258
2259 let prev_read_dir_count = fs.read_dir_call_count();
2260
2261 let fake_server = fake_servers.next().await.unwrap();
2262 cx.executor().run_until_parked();
2263 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2264 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2265 id
2266 });
2267
2268 // Simulate jumping to a definition in a dependency outside of the worktree.
2269 let _out_of_worktree_buffer = project
2270 .update(cx, |project, cx| {
2271 project.open_local_buffer_via_lsp(
2272 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2273 server_id,
2274 cx,
2275 )
2276 })
2277 .await
2278 .unwrap();
2279
2280 // Keep track of the FS events reported to the language server.
2281 let file_changes = Arc::new(Mutex::new(Vec::new()));
2282 fake_server
2283 .request::<lsp::request::RegisterCapability>(
2284 lsp::RegistrationParams {
2285 registrations: vec![lsp::Registration {
2286 id: Default::default(),
2287 method: "workspace/didChangeWatchedFiles".to_string(),
2288 register_options: serde_json::to_value(
2289 lsp::DidChangeWatchedFilesRegistrationOptions {
2290 watchers: vec![
2291 lsp::FileSystemWatcher {
2292 glob_pattern: lsp::GlobPattern::String(
2293 path!("/the-root/Cargo.toml").to_string(),
2294 ),
2295 kind: None,
2296 },
2297 lsp::FileSystemWatcher {
2298 glob_pattern: lsp::GlobPattern::String(
2299 path!("/the-root/src/*.{rs,c}").to_string(),
2300 ),
2301 kind: None,
2302 },
2303 lsp::FileSystemWatcher {
2304 glob_pattern: lsp::GlobPattern::String(
2305 path!("/the-root/target/y/**/*.rs").to_string(),
2306 ),
2307 kind: None,
2308 },
2309 lsp::FileSystemWatcher {
2310 glob_pattern: lsp::GlobPattern::String(
2311 path!("/the/stdlib/src/**/*.rs").to_string(),
2312 ),
2313 kind: None,
2314 },
2315 lsp::FileSystemWatcher {
2316 glob_pattern: lsp::GlobPattern::String(
2317 path!("**/Cargo.lock").to_string(),
2318 ),
2319 kind: None,
2320 },
2321 ],
2322 },
2323 )
2324 .ok(),
2325 }],
2326 },
2327 DEFAULT_LSP_REQUEST_TIMEOUT,
2328 )
2329 .await
2330 .into_response()
2331 .unwrap();
2332 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2333 let file_changes = file_changes.clone();
2334 move |params, _| {
2335 let mut file_changes = file_changes.lock();
2336 file_changes.extend(params.changes);
2337 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2338 }
2339 });
2340
2341 cx.executor().run_until_parked();
2342 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2343 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2344
2345 let mut new_watched_paths = fs.watched_paths();
2346 new_watched_paths.retain(|path| {
2347 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2348 });
2349 assert_eq!(
2350 &new_watched_paths,
2351 &[
2352 Path::new(path!("/the-root")),
2353 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2354 Path::new(path!("/the/stdlib/src"))
2355 ]
2356 );
2357
2358 // Now the language server has asked us to watch an ignored directory path,
2359 // so we recursively load it.
2360 project.update(cx, |project, cx| {
2361 let worktree = project.visible_worktrees(cx).next().unwrap();
2362 assert_eq!(
2363 worktree
2364 .read(cx)
2365 .snapshot()
2366 .entries(true, 0)
2367 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2368 .collect::<Vec<_>>(),
2369 &[
2370 ("", false),
2371 (".gitignore", false),
2372 ("Cargo.lock", false),
2373 ("src", false),
2374 ("src/a.rs", false),
2375 ("src/b.rs", false),
2376 ("target", true),
2377 ("target/x", true),
2378 ("target/y", true),
2379 ("target/y/out", true),
2380 ("target/y/out/y.rs", true),
2381 ("target/z", true),
2382 ]
2383 );
2384 });
2385
2386 // Perform some file system mutations, two of which match the watched patterns,
2387 // and one of which does not.
2388 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2389 .await
2390 .unwrap();
2391 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2392 .await
2393 .unwrap();
2394 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2395 .await
2396 .unwrap();
2397 fs.create_file(
2398 path!("/the-root/target/x/out/x2.rs").as_ref(),
2399 Default::default(),
2400 )
2401 .await
2402 .unwrap();
2403 fs.create_file(
2404 path!("/the-root/target/y/out/y2.rs").as_ref(),
2405 Default::default(),
2406 )
2407 .await
2408 .unwrap();
2409 fs.save(
2410 path!("/the-root/Cargo.lock").as_ref(),
2411 &"".into(),
2412 Default::default(),
2413 )
2414 .await
2415 .unwrap();
2416 fs.save(
2417 path!("/the-stdlib/LICENSE").as_ref(),
2418 &"".into(),
2419 Default::default(),
2420 )
2421 .await
2422 .unwrap();
2423 fs.save(
2424 path!("/the/stdlib/src/string.rs").as_ref(),
2425 &"".into(),
2426 Default::default(),
2427 )
2428 .await
2429 .unwrap();
2430
2431 // The language server receives events for the FS mutations that match its watch patterns.
2432 cx.executor().run_until_parked();
2433 assert_eq!(
2434 &*file_changes.lock(),
2435 &[
2436 lsp::FileEvent {
2437 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2438 typ: lsp::FileChangeType::CHANGED,
2439 },
2440 lsp::FileEvent {
2441 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2442 typ: lsp::FileChangeType::DELETED,
2443 },
2444 lsp::FileEvent {
2445 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2446 typ: lsp::FileChangeType::CREATED,
2447 },
2448 lsp::FileEvent {
2449 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2450 typ: lsp::FileChangeType::CREATED,
2451 },
2452 lsp::FileEvent {
2453 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2454 typ: lsp::FileChangeType::CHANGED,
2455 },
2456 ]
2457 );
2458}
2459
2460#[gpui::test]
2461async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2462 init_test(cx);
2463
2464 let fs = FakeFs::new(cx.executor());
2465 fs.insert_tree(
2466 path!("/dir"),
2467 json!({
2468 "a.rs": "let a = 1;",
2469 "b.rs": "let b = 2;"
2470 }),
2471 )
2472 .await;
2473
2474 let project = Project::test(
2475 fs,
2476 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2477 cx,
2478 )
2479 .await;
2480 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2481
2482 let buffer_a = project
2483 .update(cx, |project, cx| {
2484 project.open_local_buffer(path!("/dir/a.rs"), cx)
2485 })
2486 .await
2487 .unwrap();
2488 let buffer_b = project
2489 .update(cx, |project, cx| {
2490 project.open_local_buffer(path!("/dir/b.rs"), cx)
2491 })
2492 .await
2493 .unwrap();
2494
2495 lsp_store.update(cx, |lsp_store, cx| {
2496 lsp_store
2497 .update_diagnostics(
2498 LanguageServerId(0),
2499 lsp::PublishDiagnosticsParams {
2500 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2501 version: None,
2502 diagnostics: vec![lsp::Diagnostic {
2503 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2504 severity: Some(lsp::DiagnosticSeverity::ERROR),
2505 message: "error 1".to_string(),
2506 ..Default::default()
2507 }],
2508 },
2509 None,
2510 DiagnosticSourceKind::Pushed,
2511 &[],
2512 cx,
2513 )
2514 .unwrap();
2515 lsp_store
2516 .update_diagnostics(
2517 LanguageServerId(0),
2518 lsp::PublishDiagnosticsParams {
2519 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2520 version: None,
2521 diagnostics: vec![lsp::Diagnostic {
2522 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2523 severity: Some(DiagnosticSeverity::WARNING),
2524 message: "error 2".to_string(),
2525 ..Default::default()
2526 }],
2527 },
2528 None,
2529 DiagnosticSourceKind::Pushed,
2530 &[],
2531 cx,
2532 )
2533 .unwrap();
2534 });
2535
2536 buffer_a.update(cx, |buffer, _| {
2537 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2538 assert_eq!(
2539 chunks
2540 .iter()
2541 .map(|(s, d)| (s.as_str(), *d))
2542 .collect::<Vec<_>>(),
2543 &[
2544 ("let ", None),
2545 ("a", Some(DiagnosticSeverity::ERROR)),
2546 (" = 1;", None),
2547 ]
2548 );
2549 });
2550 buffer_b.update(cx, |buffer, _| {
2551 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2552 assert_eq!(
2553 chunks
2554 .iter()
2555 .map(|(s, d)| (s.as_str(), *d))
2556 .collect::<Vec<_>>(),
2557 &[
2558 ("let ", None),
2559 ("b", Some(DiagnosticSeverity::WARNING)),
2560 (" = 2;", None),
2561 ]
2562 );
2563 });
2564}
2565
2566#[gpui::test]
2567async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2568 init_test(cx);
2569
2570 let fs = FakeFs::new(cx.executor());
2571 fs.insert_tree(
2572 path!("/root"),
2573 json!({
2574 "dir": {
2575 ".git": {
2576 "HEAD": "ref: refs/heads/main",
2577 },
2578 ".gitignore": "b.rs",
2579 "a.rs": "let a = 1;",
2580 "b.rs": "let b = 2;",
2581 },
2582 "other.rs": "let b = c;"
2583 }),
2584 )
2585 .await;
2586
2587 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2588 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2589 let (worktree, _) = project
2590 .update(cx, |project, cx| {
2591 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2592 })
2593 .await
2594 .unwrap();
2595 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2596
2597 let (worktree, _) = project
2598 .update(cx, |project, cx| {
2599 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2600 })
2601 .await
2602 .unwrap();
2603 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2604
2605 let server_id = LanguageServerId(0);
2606 lsp_store.update(cx, |lsp_store, cx| {
2607 lsp_store
2608 .update_diagnostics(
2609 server_id,
2610 lsp::PublishDiagnosticsParams {
2611 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2612 version: None,
2613 diagnostics: vec![lsp::Diagnostic {
2614 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2615 severity: Some(lsp::DiagnosticSeverity::ERROR),
2616 message: "unused variable 'b'".to_string(),
2617 ..Default::default()
2618 }],
2619 },
2620 None,
2621 DiagnosticSourceKind::Pushed,
2622 &[],
2623 cx,
2624 )
2625 .unwrap();
2626 lsp_store
2627 .update_diagnostics(
2628 server_id,
2629 lsp::PublishDiagnosticsParams {
2630 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2631 version: None,
2632 diagnostics: vec![lsp::Diagnostic {
2633 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2634 severity: Some(lsp::DiagnosticSeverity::ERROR),
2635 message: "unknown variable 'c'".to_string(),
2636 ..Default::default()
2637 }],
2638 },
2639 None,
2640 DiagnosticSourceKind::Pushed,
2641 &[],
2642 cx,
2643 )
2644 .unwrap();
2645 });
2646
2647 let main_ignored_buffer = project
2648 .update(cx, |project, cx| {
2649 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2650 })
2651 .await
2652 .unwrap();
2653 main_ignored_buffer.update(cx, |buffer, _| {
2654 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2655 assert_eq!(
2656 chunks
2657 .iter()
2658 .map(|(s, d)| (s.as_str(), *d))
2659 .collect::<Vec<_>>(),
2660 &[
2661 ("let ", None),
2662 ("b", Some(DiagnosticSeverity::ERROR)),
2663 (" = 2;", None),
2664 ],
2665 "Gigitnored buffers should still get in-buffer diagnostics",
2666 );
2667 });
2668 let other_buffer = project
2669 .update(cx, |project, cx| {
2670 project.open_buffer((other_worktree_id, rel_path("")), cx)
2671 })
2672 .await
2673 .unwrap();
2674 other_buffer.update(cx, |buffer, _| {
2675 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2676 assert_eq!(
2677 chunks
2678 .iter()
2679 .map(|(s, d)| (s.as_str(), *d))
2680 .collect::<Vec<_>>(),
2681 &[
2682 ("let b = ", None),
2683 ("c", Some(DiagnosticSeverity::ERROR)),
2684 (";", None),
2685 ],
2686 "Buffers from hidden projects should still get in-buffer diagnostics"
2687 );
2688 });
2689
2690 project.update(cx, |project, cx| {
2691 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2692 assert_eq!(
2693 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2694 vec![(
2695 ProjectPath {
2696 worktree_id: main_worktree_id,
2697 path: rel_path("b.rs").into(),
2698 },
2699 server_id,
2700 DiagnosticSummary {
2701 error_count: 1,
2702 warning_count: 0,
2703 }
2704 )]
2705 );
2706 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2707 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2708 });
2709}
2710
// Verifies that LSP work-done progress using the adapter's disk-based
// diagnostics token is surfaced as DiskBasedDiagnosticsStarted/Finished
// project events, in order, interleaved with DiagnosticsUpdated events.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Progress token the fake adapter is configured to treat as disk-based.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    // The first project event announces the newly added server.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Progress that begins under the disk-based token emits the corresponding
    // "started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic while progress is active emits DiagnosticsUpdated.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the disk-based progress emits the corresponding "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second identical empty publish produces no further event: after all
    // pending work settles, the event stream is still pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2846
// Verifies that restarting a language server mid-way through a disk-based
// diagnostics run does not leave the project stuck in the "diagnosing" state:
// the replacement server's progress lifecycle fully supersedes the old one's.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed, then the replacement (id 1) is added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the replacement server counts as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2948
2949#[gpui::test]
2950async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2951 init_test(cx);
2952
2953 let fs = FakeFs::new(cx.executor());
2954 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2955
2956 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2957
2958 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2959 language_registry.add(rust_lang());
2960 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2961
2962 let (buffer, _) = project
2963 .update(cx, |project, cx| {
2964 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2965 })
2966 .await
2967 .unwrap();
2968
2969 // Publish diagnostics
2970 let fake_server = fake_servers.next().await.unwrap();
2971 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2972 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2973 version: None,
2974 diagnostics: vec![lsp::Diagnostic {
2975 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2976 severity: Some(lsp::DiagnosticSeverity::ERROR),
2977 message: "the message".to_string(),
2978 ..Default::default()
2979 }],
2980 });
2981
2982 cx.executor().run_until_parked();
2983 buffer.update(cx, |buffer, _| {
2984 assert_eq!(
2985 buffer
2986 .snapshot()
2987 .diagnostics_in_range::<_, usize>(0..1, false)
2988 .map(|entry| entry.diagnostic.message.clone())
2989 .collect::<Vec<_>>(),
2990 ["the message".to_string()]
2991 );
2992 });
2993 project.update(cx, |project, cx| {
2994 assert_eq!(
2995 project.diagnostic_summary(false, cx),
2996 DiagnosticSummary {
2997 error_count: 1,
2998 warning_count: 0,
2999 }
3000 );
3001 });
3002
3003 project.update(cx, |project, cx| {
3004 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3005 });
3006
3007 // The diagnostics are cleared.
3008 cx.executor().run_until_parked();
3009 buffer.update(cx, |buffer, _| {
3010 assert_eq!(
3011 buffer
3012 .snapshot()
3013 .diagnostics_in_range::<_, usize>(0..1, false)
3014 .map(|entry| entry.diagnostic.message.clone())
3015 .collect::<Vec<_>>(),
3016 Vec::<String>::new(),
3017 );
3018 });
3019 project.update(cx, |project, cx| {
3020 assert_eq!(
3021 project.diagnostic_summary(false, cx),
3022 DiagnosticSummary {
3023 error_count: 0,
3024 warning_count: 0,
3025 }
3026 );
3027 });
3028}
3029
3030#[gpui::test]
3031async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
3032 init_test(cx);
3033
3034 let fs = FakeFs::new(cx.executor());
3035 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
3036
3037 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3038 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3039
3040 language_registry.add(rust_lang());
3041 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3042
3043 let (buffer, _handle) = project
3044 .update(cx, |project, cx| {
3045 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3046 })
3047 .await
3048 .unwrap();
3049
3050 // Before restarting the server, report diagnostics with an unknown buffer version.
3051 let fake_server = fake_servers.next().await.unwrap();
3052 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3053 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3054 version: Some(10000),
3055 diagnostics: Vec::new(),
3056 });
3057 cx.executor().run_until_parked();
3058 project.update(cx, |project, cx| {
3059 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3060 });
3061
3062 let mut fake_server = fake_servers.next().await.unwrap();
3063 let notification = fake_server
3064 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3065 .await
3066 .text_document;
3067 assert_eq!(notification.version, 0);
3068}
3069
3070#[gpui::test]
3071async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
3072 init_test(cx);
3073
3074 let progress_token = "the-progress-token";
3075
3076 let fs = FakeFs::new(cx.executor());
3077 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
3078
3079 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3080
3081 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3082 language_registry.add(rust_lang());
3083 let mut fake_servers = language_registry.register_fake_lsp(
3084 "Rust",
3085 FakeLspAdapter {
3086 name: "the-language-server",
3087 disk_based_diagnostics_sources: vec!["disk".into()],
3088 disk_based_diagnostics_progress_token: Some(progress_token.into()),
3089 ..Default::default()
3090 },
3091 );
3092
3093 let (buffer, _handle) = project
3094 .update(cx, |project, cx| {
3095 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3096 })
3097 .await
3098 .unwrap();
3099
3100 // Simulate diagnostics starting to update.
3101 let mut fake_server = fake_servers.next().await.unwrap();
3102 fake_server
3103 .start_progress_with(
3104 "another-token",
3105 lsp::WorkDoneProgressBegin {
3106 cancellable: Some(false),
3107 ..Default::default()
3108 },
3109 DEFAULT_LSP_REQUEST_TIMEOUT,
3110 )
3111 .await;
3112 // Ensure progress notification is fully processed before starting the next one
3113 cx.executor().run_until_parked();
3114
3115 fake_server
3116 .start_progress_with(
3117 progress_token,
3118 lsp::WorkDoneProgressBegin {
3119 cancellable: Some(true),
3120 ..Default::default()
3121 },
3122 DEFAULT_LSP_REQUEST_TIMEOUT,
3123 )
3124 .await;
3125 // Ensure progress notification is fully processed before cancelling
3126 cx.executor().run_until_parked();
3127
3128 project.update(cx, |project, cx| {
3129 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
3130 });
3131 cx.executor().run_until_parked();
3132
3133 let cancel_notification = fake_server
3134 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
3135 .await;
3136 assert_eq!(
3137 cancel_notification.token,
3138 NumberOrString::String(progress_token.into())
3139 );
3140}
3141
3142#[gpui::test]
3143async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
3144 init_test(cx);
3145
3146 let fs = FakeFs::new(cx.executor());
3147 fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
3148 .await;
3149
3150 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3151 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3152
3153 let mut fake_rust_servers = language_registry.register_fake_lsp(
3154 "Rust",
3155 FakeLspAdapter {
3156 name: "rust-lsp",
3157 ..Default::default()
3158 },
3159 );
3160 let mut fake_js_servers = language_registry.register_fake_lsp(
3161 "JavaScript",
3162 FakeLspAdapter {
3163 name: "js-lsp",
3164 ..Default::default()
3165 },
3166 );
3167 language_registry.add(rust_lang());
3168 language_registry.add(js_lang());
3169
3170 let _rs_buffer = project
3171 .update(cx, |project, cx| {
3172 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3173 })
3174 .await
3175 .unwrap();
3176 let _js_buffer = project
3177 .update(cx, |project, cx| {
3178 project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
3179 })
3180 .await
3181 .unwrap();
3182
3183 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
3184 assert_eq!(
3185 fake_rust_server_1
3186 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3187 .await
3188 .text_document
3189 .uri
3190 .as_str(),
3191 uri!("file:///dir/a.rs")
3192 );
3193
3194 let mut fake_js_server = fake_js_servers.next().await.unwrap();
3195 assert_eq!(
3196 fake_js_server
3197 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3198 .await
3199 .text_document
3200 .uri
3201 .as_str(),
3202 uri!("file:///dir/b.js")
3203 );
3204
3205 // Disable Rust language server, ensuring only that server gets stopped.
3206 cx.update(|cx| {
3207 SettingsStore::update_global(cx, |settings, cx| {
3208 settings.update_user_settings(cx, |settings| {
3209 settings.languages_mut().insert(
3210 "Rust".into(),
3211 LanguageSettingsContent {
3212 enable_language_server: Some(false),
3213 ..Default::default()
3214 },
3215 );
3216 });
3217 })
3218 });
3219 fake_rust_server_1
3220 .receive_notification::<lsp::notification::Exit>()
3221 .await;
3222
3223 // Enable Rust and disable JavaScript language servers, ensuring that the
3224 // former gets started again and that the latter stops.
3225 cx.update(|cx| {
3226 SettingsStore::update_global(cx, |settings, cx| {
3227 settings.update_user_settings(cx, |settings| {
3228 settings.languages_mut().insert(
3229 "Rust".into(),
3230 LanguageSettingsContent {
3231 enable_language_server: Some(true),
3232 ..Default::default()
3233 },
3234 );
3235 settings.languages_mut().insert(
3236 "JavaScript".into(),
3237 LanguageSettingsContent {
3238 enable_language_server: Some(false),
3239 ..Default::default()
3240 },
3241 );
3242 });
3243 })
3244 });
3245 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
3246 assert_eq!(
3247 fake_rust_server_2
3248 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3249 .await
3250 .text_document
3251 .uri
3252 .as_str(),
3253 uri!("file:///dir/a.rs")
3254 );
3255 fake_js_server
3256 .receive_notification::<lsp::notification::Exit>()
3257 .await;
3258}
3259
3260#[gpui::test(iterations = 3)]
3261async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
3262 init_test(cx);
3263
3264 let text = "
3265 fn a() { A }
3266 fn b() { BB }
3267 fn c() { CCC }
3268 "
3269 .unindent();
3270
3271 let fs = FakeFs::new(cx.executor());
3272 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3273
3274 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3275 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3276
3277 language_registry.add(rust_lang());
3278 let mut fake_servers = language_registry.register_fake_lsp(
3279 "Rust",
3280 FakeLspAdapter {
3281 disk_based_diagnostics_sources: vec!["disk".into()],
3282 ..Default::default()
3283 },
3284 );
3285
3286 let buffer = project
3287 .update(cx, |project, cx| {
3288 project.open_local_buffer(path!("/dir/a.rs"), cx)
3289 })
3290 .await
3291 .unwrap();
3292
3293 let _handle = project.update(cx, |project, cx| {
3294 project.register_buffer_with_language_servers(&buffer, cx)
3295 });
3296
3297 let mut fake_server = fake_servers.next().await.unwrap();
3298 let open_notification = fake_server
3299 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3300 .await;
3301
3302 // Edit the buffer, moving the content down
3303 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
3304 let change_notification_1 = fake_server
3305 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3306 .await;
3307 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
3308
3309 // Report some diagnostics for the initial version of the buffer
3310 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3311 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3312 version: Some(open_notification.text_document.version),
3313 diagnostics: vec![
3314 lsp::Diagnostic {
3315 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3316 severity: Some(DiagnosticSeverity::ERROR),
3317 message: "undefined variable 'A'".to_string(),
3318 source: Some("disk".to_string()),
3319 ..Default::default()
3320 },
3321 lsp::Diagnostic {
3322 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3323 severity: Some(DiagnosticSeverity::ERROR),
3324 message: "undefined variable 'BB'".to_string(),
3325 source: Some("disk".to_string()),
3326 ..Default::default()
3327 },
3328 lsp::Diagnostic {
3329 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
3330 severity: Some(DiagnosticSeverity::ERROR),
3331 source: Some("disk".to_string()),
3332 message: "undefined variable 'CCC'".to_string(),
3333 ..Default::default()
3334 },
3335 ],
3336 });
3337
3338 // The diagnostics have moved down since they were created.
3339 cx.executor().run_until_parked();
3340 buffer.update(cx, |buffer, _| {
3341 assert_eq!(
3342 buffer
3343 .snapshot()
3344 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
3345 .collect::<Vec<_>>(),
3346 &[
3347 DiagnosticEntry {
3348 range: Point::new(3, 9)..Point::new(3, 11),
3349 diagnostic: Diagnostic {
3350 source: Some("disk".into()),
3351 severity: DiagnosticSeverity::ERROR,
3352 message: "undefined variable 'BB'".to_string(),
3353 is_disk_based: true,
3354 group_id: 1,
3355 is_primary: true,
3356 source_kind: DiagnosticSourceKind::Pushed,
3357 ..Diagnostic::default()
3358 },
3359 },
3360 DiagnosticEntry {
3361 range: Point::new(4, 9)..Point::new(4, 12),
3362 diagnostic: Diagnostic {
3363 source: Some("disk".into()),
3364 severity: DiagnosticSeverity::ERROR,
3365 message: "undefined variable 'CCC'".to_string(),
3366 is_disk_based: true,
3367 group_id: 2,
3368 is_primary: true,
3369 source_kind: DiagnosticSourceKind::Pushed,
3370 ..Diagnostic::default()
3371 }
3372 }
3373 ]
3374 );
3375 assert_eq!(
3376 chunks_with_diagnostics(buffer, 0..buffer.len()),
3377 [
3378 ("\n\nfn a() { ".to_string(), None),
3379 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3380 (" }\nfn b() { ".to_string(), None),
3381 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
3382 (" }\nfn c() { ".to_string(), None),
3383 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
3384 (" }\n".to_string(), None),
3385 ]
3386 );
3387 assert_eq!(
3388 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
3389 [
3390 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
3391 (" }\nfn c() { ".to_string(), None),
3392 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
3393 ]
3394 );
3395 });
3396
3397 // Ensure overlapping diagnostics are highlighted correctly.
3398 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3399 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3400 version: Some(open_notification.text_document.version),
3401 diagnostics: vec![
3402 lsp::Diagnostic {
3403 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3404 severity: Some(DiagnosticSeverity::ERROR),
3405 message: "undefined variable 'A'".to_string(),
3406 source: Some("disk".to_string()),
3407 ..Default::default()
3408 },
3409 lsp::Diagnostic {
3410 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
3411 severity: Some(DiagnosticSeverity::WARNING),
3412 message: "unreachable statement".to_string(),
3413 source: Some("disk".to_string()),
3414 ..Default::default()
3415 },
3416 ],
3417 });
3418
3419 cx.executor().run_until_parked();
3420 buffer.update(cx, |buffer, _| {
3421 assert_eq!(
3422 buffer
3423 .snapshot()
3424 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
3425 .collect::<Vec<_>>(),
3426 &[
3427 DiagnosticEntry {
3428 range: Point::new(2, 9)..Point::new(2, 12),
3429 diagnostic: Diagnostic {
3430 source: Some("disk".into()),
3431 severity: DiagnosticSeverity::WARNING,
3432 message: "unreachable statement".to_string(),
3433 is_disk_based: true,
3434 group_id: 4,
3435 is_primary: true,
3436 source_kind: DiagnosticSourceKind::Pushed,
3437 ..Diagnostic::default()
3438 }
3439 },
3440 DiagnosticEntry {
3441 range: Point::new(2, 9)..Point::new(2, 10),
3442 diagnostic: Diagnostic {
3443 source: Some("disk".into()),
3444 severity: DiagnosticSeverity::ERROR,
3445 message: "undefined variable 'A'".to_string(),
3446 is_disk_based: true,
3447 group_id: 3,
3448 is_primary: true,
3449 source_kind: DiagnosticSourceKind::Pushed,
3450 ..Diagnostic::default()
3451 },
3452 }
3453 ]
3454 );
3455 assert_eq!(
3456 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
3457 [
3458 ("fn a() { ".to_string(), None),
3459 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
3460 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3461 ("\n".to_string(), None),
3462 ]
3463 );
3464 assert_eq!(
3465 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
3466 [
3467 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
3468 ("\n".to_string(), None),
3469 ]
3470 );
3471 });
3472
3473 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
3474 // changes since the last save.
3475 buffer.update(cx, |buffer, cx| {
3476 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
3477 buffer.edit(
3478 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
3479 None,
3480 cx,
3481 );
3482 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
3483 });
3484 let change_notification_2 = fake_server
3485 .receive_notification::<lsp::notification::DidChangeTextDocument>()
3486 .await;
3487 assert!(
3488 change_notification_2.text_document.version > change_notification_1.text_document.version
3489 );
3490
3491 // Handle out-of-order diagnostics
3492 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3493 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3494 version: Some(change_notification_2.text_document.version),
3495 diagnostics: vec![
3496 lsp::Diagnostic {
3497 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
3498 severity: Some(DiagnosticSeverity::ERROR),
3499 message: "undefined variable 'BB'".to_string(),
3500 source: Some("disk".to_string()),
3501 ..Default::default()
3502 },
3503 lsp::Diagnostic {
3504 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3505 severity: Some(DiagnosticSeverity::WARNING),
3506 message: "undefined variable 'A'".to_string(),
3507 source: Some("disk".to_string()),
3508 ..Default::default()
3509 },
3510 ],
3511 });
3512
3513 cx.executor().run_until_parked();
3514 buffer.update(cx, |buffer, _| {
3515 assert_eq!(
3516 buffer
3517 .snapshot()
3518 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3519 .collect::<Vec<_>>(),
3520 &[
3521 DiagnosticEntry {
3522 range: Point::new(2, 21)..Point::new(2, 22),
3523 diagnostic: Diagnostic {
3524 source: Some("disk".into()),
3525 severity: DiagnosticSeverity::WARNING,
3526 message: "undefined variable 'A'".to_string(),
3527 is_disk_based: true,
3528 group_id: 6,
3529 is_primary: true,
3530 source_kind: DiagnosticSourceKind::Pushed,
3531 ..Diagnostic::default()
3532 }
3533 },
3534 DiagnosticEntry {
3535 range: Point::new(3, 9)..Point::new(3, 14),
3536 diagnostic: Diagnostic {
3537 source: Some("disk".into()),
3538 severity: DiagnosticSeverity::ERROR,
3539 message: "undefined variable 'BB'".to_string(),
3540 is_disk_based: true,
3541 group_id: 5,
3542 is_primary: true,
3543 source_kind: DiagnosticSourceKind::Pushed,
3544 ..Diagnostic::default()
3545 },
3546 }
3547 ]
3548 );
3549 });
3550}
3551
3552#[gpui::test]
3553async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
3554 init_test(cx);
3555
3556 let text = concat!(
3557 "let one = ;\n", //
3558 "let two = \n",
3559 "let three = 3;\n",
3560 );
3561
3562 let fs = FakeFs::new(cx.executor());
3563 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
3564
3565 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3566 let buffer = project
3567 .update(cx, |project, cx| {
3568 project.open_local_buffer(path!("/dir/a.rs"), cx)
3569 })
3570 .await
3571 .unwrap();
3572
3573 project.update(cx, |project, cx| {
3574 project.lsp_store().update(cx, |lsp_store, cx| {
3575 lsp_store
3576 .update_diagnostic_entries(
3577 LanguageServerId(0),
3578 PathBuf::from(path!("/dir/a.rs")),
3579 None,
3580 None,
3581 vec![
3582 DiagnosticEntry {
3583 range: Unclipped(PointUtf16::new(0, 10))
3584 ..Unclipped(PointUtf16::new(0, 10)),
3585 diagnostic: Diagnostic {
3586 severity: DiagnosticSeverity::ERROR,
3587 message: "syntax error 1".to_string(),
3588 source_kind: DiagnosticSourceKind::Pushed,
3589 ..Diagnostic::default()
3590 },
3591 },
3592 DiagnosticEntry {
3593 range: Unclipped(PointUtf16::new(1, 10))
3594 ..Unclipped(PointUtf16::new(1, 10)),
3595 diagnostic: Diagnostic {
3596 severity: DiagnosticSeverity::ERROR,
3597 message: "syntax error 2".to_string(),
3598 source_kind: DiagnosticSourceKind::Pushed,
3599 ..Diagnostic::default()
3600 },
3601 },
3602 ],
3603 cx,
3604 )
3605 .unwrap();
3606 })
3607 });
3608
3609 // An empty range is extended forward to include the following character.
3610 // At the end of a line, an empty range is extended backward to include
3611 // the preceding character.
3612 buffer.update(cx, |buffer, _| {
3613 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
3614 assert_eq!(
3615 chunks
3616 .iter()
3617 .map(|(s, d)| (s.as_str(), *d))
3618 .collect::<Vec<_>>(),
3619 &[
3620 ("let one = ", None),
3621 (";", Some(DiagnosticSeverity::ERROR)),
3622 ("\nlet two =", None),
3623 (" ", Some(DiagnosticSeverity::ERROR)),
3624 ("\nlet three = 3;\n", None)
3625 ]
3626 );
3627 });
3628}
3629
3630#[gpui::test]
3631async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
3632 init_test(cx);
3633
3634 let fs = FakeFs::new(cx.executor());
3635 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
3636 .await;
3637
3638 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3639 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3640
3641 lsp_store.update(cx, |lsp_store, cx| {
3642 lsp_store
3643 .update_diagnostic_entries(
3644 LanguageServerId(0),
3645 Path::new(path!("/dir/a.rs")).to_owned(),
3646 None,
3647 None,
3648 vec![DiagnosticEntry {
3649 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3650 diagnostic: Diagnostic {
3651 severity: DiagnosticSeverity::ERROR,
3652 is_primary: true,
3653 message: "syntax error a1".to_string(),
3654 source_kind: DiagnosticSourceKind::Pushed,
3655 ..Diagnostic::default()
3656 },
3657 }],
3658 cx,
3659 )
3660 .unwrap();
3661 lsp_store
3662 .update_diagnostic_entries(
3663 LanguageServerId(1),
3664 Path::new(path!("/dir/a.rs")).to_owned(),
3665 None,
3666 None,
3667 vec![DiagnosticEntry {
3668 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3669 diagnostic: Diagnostic {
3670 severity: DiagnosticSeverity::ERROR,
3671 is_primary: true,
3672 message: "syntax error b1".to_string(),
3673 source_kind: DiagnosticSourceKind::Pushed,
3674 ..Diagnostic::default()
3675 },
3676 }],
3677 cx,
3678 )
3679 .unwrap();
3680
3681 assert_eq!(
3682 lsp_store.diagnostic_summary(false, cx),
3683 DiagnosticSummary {
3684 error_count: 2,
3685 warning_count: 0,
3686 }
3687 );
3688 });
3689}
3690
3691#[gpui::test]
3692async fn test_diagnostic_summaries_cleared_on_worktree_entry_removal(
3693 cx: &mut gpui::TestAppContext,
3694) {
3695 init_test(cx);
3696
3697 let fs = FakeFs::new(cx.executor());
3698 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one", "b.rs": "two" }))
3699 .await;
3700
3701 let project = Project::test(fs.clone(), [Path::new(path!("/dir"))], cx).await;
3702 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3703
3704 lsp_store.update(cx, |lsp_store, cx| {
3705 lsp_store
3706 .update_diagnostic_entries(
3707 LanguageServerId(0),
3708 Path::new(path!("/dir/a.rs")).to_owned(),
3709 None,
3710 None,
3711 vec![DiagnosticEntry {
3712 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3713 diagnostic: Diagnostic {
3714 severity: DiagnosticSeverity::ERROR,
3715 is_primary: true,
3716 message: "error in a".to_string(),
3717 source_kind: DiagnosticSourceKind::Pushed,
3718 ..Diagnostic::default()
3719 },
3720 }],
3721 cx,
3722 )
3723 .unwrap();
3724 lsp_store
3725 .update_diagnostic_entries(
3726 LanguageServerId(0),
3727 Path::new(path!("/dir/b.rs")).to_owned(),
3728 None,
3729 None,
3730 vec![DiagnosticEntry {
3731 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
3732 diagnostic: Diagnostic {
3733 severity: DiagnosticSeverity::WARNING,
3734 is_primary: true,
3735 message: "warning in b".to_string(),
3736 source_kind: DiagnosticSourceKind::Pushed,
3737 ..Diagnostic::default()
3738 },
3739 }],
3740 cx,
3741 )
3742 .unwrap();
3743
3744 assert_eq!(
3745 lsp_store.diagnostic_summary(false, cx),
3746 DiagnosticSummary {
3747 error_count: 1,
3748 warning_count: 1,
3749 }
3750 );
3751 });
3752
3753 fs.remove_file(path!("/dir/a.rs").as_ref(), Default::default())
3754 .await
3755 .unwrap();
3756 cx.executor().run_until_parked();
3757
3758 lsp_store.update(cx, |lsp_store, cx| {
3759 assert_eq!(
3760 lsp_store.diagnostic_summary(false, cx),
3761 DiagnosticSummary {
3762 error_count: 0,
3763 warning_count: 1,
3764 },
3765 );
3766 });
3767}
3768
// Verifies that restarting a buffer's language server clears its diagnostics
// from the project summary and emits a `DiagnosticsUpdated` event.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_server_restart(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one error so that there is a non-empty summary to be cleared
    // by the restart below.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 1)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error before restart".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Subscribe before restarting so the restart's events are captured.
    let mut events = cx.events(&project);

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });
    cx.executor().run_until_parked();

    // Drain all currently-buffered events without blocking (`now_or_never`
    // returns None once the stream would need to wait), looking for the
    // diagnostics-update notification triggered by stopping the server.
    let mut received_diagnostics_updated = false;
    while let Some(Some(event)) =
        futures::FutureExt::now_or_never(futures::StreamExt::next(&mut events))
    {
        if matches!(event, Event::DiagnosticsUpdated { .. }) {
            received_diagnostics_updated = true;
        }
    }
    assert!(
        received_diagnostics_updated,
        "DiagnosticsUpdated event should be emitted when a language server is stopped"
    );

    // After the restart, the old server's diagnostics must be gone.
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
3842
// Verifies that reloading a buffer from disk triggers a fresh document
// diagnostics pull from a server that advertises pull-diagnostics support.
#[gpui::test]
async fn test_diagnostic_summaries_cleared_on_buffer_reload(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Counts how many textDocument/diagnostic requests the fake server has
    // served; shared with the request handler installed below.
    let pull_count = Arc::new(atomic::AtomicUsize::new(0));
    let closure_pull_count = pull_count.clone();
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Advertise document (not workspace) pull diagnostics so the
            // client will issue textDocument/diagnostic requests.
            capabilities: lsp::ServerCapabilities {
                diagnostic_provider: Some(lsp::DiagnosticServerCapabilities::Options(
                    lsp::DiagnosticOptions {
                        identifier: Some("test-reload".to_string()),
                        inter_file_dependencies: true,
                        workspace_diagnostics: false,
                        work_done_progress_options: Default::default(),
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            // Every pull request bumps the counter and returns an empty
            // full report.
            initializer: Some(Box::new(move |fake_server| {
                let pull_count = closure_pull_count.clone();
                fake_server.set_request_handler::<lsp::request::DocumentDiagnosticRequest, _, _>(
                    move |_, _| {
                        let pull_count = pull_count.clone();
                        async move {
                            pull_count.fetch_add(1, atomic::Ordering::SeqCst);
                            Ok(lsp::DocumentDiagnosticReportResult::Report(
                                lsp::DocumentDiagnosticReport::Full(
                                    lsp::RelatedFullDocumentDiagnosticReport {
                                        related_documents: None,
                                        full_document_diagnostic_report:
                                            lsp::FullDocumentDiagnosticReport {
                                                result_id: None,
                                                items: Vec::new(),
                                            },
                                    },
                                ),
                            ))
                        }
                    },
                );
            })),
            ..FakeLspAdapter::default()
        },
    );

    let (_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Publish initial diagnostics via the fake server.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 3)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "error in a".to_string(),
            ..Default::default()
        }],
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Snapshot the counter so only pulls caused by the reload are measured.
    let pulls_before = pull_count.load(atomic::Ordering::SeqCst);

    // Change the file on disk. The FS event triggers buffer reload,
    // which in turn triggers pull_diagnostics_for_buffer.
    fs.save(
        path!("/dir/a.rs").as_ref(),
        &"fixed content".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    let pulls_after = pull_count.load(atomic::Ordering::SeqCst);
    assert!(
        pulls_after > pulls_before,
        "Expected document diagnostic pull after buffer reload (before={pulls_before}, after={pulls_after})"
    );
}
3950
// Verifies that LSP edits computed against an OLD document version are
// correctly rebased onto a buffer that has since been edited: the edits are
// interpreted in old-version coordinates, then translated through the
// buffer's subsequent changes.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the edits
    // below will be tagged with this (now stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // All ranges below are in OLD-version coordinates (before the comment
    // insertions above).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the comments inserted after
    // the server's snapshot while still landing the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
4105
// Verifies that a large, mostly-redundant LSP diff (as rust-analyzer emits
// for merge-imports) is minimized by `edits_from_lsp` down to the two edits
// that actually change the buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchors to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The whole-file diff above collapses to just two minimal edits:
        // the import rewrite and the deletion of the now-duplicate line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4216
// Verifies that `edits_from_lsp` tolerates an insertion listed AFTER a
// replacement at the same position (the LSP spec requires insertions to come
// first), still applying the insertion before the replaced text.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // No-op replacement of "Path" with itself.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    // Insertion at the start, listed out of spec order.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The import line ends up before the call site despite the ordering.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
4272
// Verifies that `edits_from_lsp` normalizes malformed server edits:
// unsorted order, an inverted range (end before start), and a range that
// extends past the end of the document.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0,8) comes after end (0,4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchors to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // After normalization the edits collapse to the same two minimal
        // edits as a well-formed diff would produce.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
4379
4380fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4381 buffer: &Buffer,
4382 range: Range<T>,
4383) -> Vec<(String, Option<DiagnosticSeverity>)> {
4384 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4385 for chunk in buffer.snapshot().chunks(range, true) {
4386 if chunks
4387 .last()
4388 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4389 {
4390 chunks.last_mut().unwrap().0.push_str(chunk.text);
4391 } else {
4392 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4393 }
4394 }
4395 chunks
4396}
4397
4398#[gpui::test(iterations = 10)]
4399async fn test_definition(cx: &mut gpui::TestAppContext) {
4400 init_test(cx);
4401
4402 let fs = FakeFs::new(cx.executor());
4403 fs.insert_tree(
4404 path!("/dir"),
4405 json!({
4406 "a.rs": "const fn a() { A }",
4407 "b.rs": "const y: i32 = crate::a()",
4408 }),
4409 )
4410 .await;
4411
4412 let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;
4413
4414 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4415 language_registry.add(rust_lang());
4416 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
4417
4418 let (buffer, _handle) = project
4419 .update(cx, |project, cx| {
4420 project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
4421 })
4422 .await
4423 .unwrap();
4424
4425 let fake_server = fake_servers.next().await.unwrap();
4426 cx.executor().run_until_parked();
4427
4428 fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
4429 let params = params.text_document_position_params;
4430 assert_eq!(
4431 params.text_document.uri.to_file_path().unwrap(),
4432 Path::new(path!("/dir/b.rs")),
4433 );
4434 assert_eq!(params.position, lsp::Position::new(0, 22));
4435
4436 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
4437 lsp::Location::new(
4438 lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
4439 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4440 ),
4441 )))
4442 });
4443 let mut definitions = project
4444 .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
4445 .await
4446 .unwrap()
4447 .unwrap();
4448
4449 // Assert no new language server started
4450 cx.executor().run_until_parked();
4451 assert!(fake_servers.try_next().is_err());
4452
4453 assert_eq!(definitions.len(), 1);
4454 let definition = definitions.pop().unwrap();
4455 cx.update(|cx| {
4456 let target_buffer = definition.target.buffer.read(cx);
4457 assert_eq!(
4458 target_buffer
4459 .file()
4460 .unwrap()
4461 .as_local()
4462 .unwrap()
4463 .abs_path(cx),
4464 Path::new(path!("/dir/a.rs")),
4465 );
4466 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
4467 assert_eq!(
4468 list_worktrees(&project, cx),
4469 [
4470 (path!("/dir/a.rs").as_ref(), false),
4471 (path!("/dir/b.rs").as_ref(), true)
4472 ],
4473 );
4474
4475 drop(definition);
4476 });
4477 cx.update(|cx| {
4478 assert_eq!(
4479 list_worktrees(&project, cx),
4480 [(path!("/dir/b.rs").as_ref(), true)]
4481 );
4482 });
4483
4484 fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
4485 project
4486 .read(cx)
4487 .worktrees(cx)
4488 .map(|worktree| {
4489 let worktree = worktree.read(cx);
4490 (
4491 worktree.as_local().unwrap().abs_path().as_ref(),
4492 worktree.is_visible(),
4493 )
4494 })
4495 .collect::<Vec<_>>()
4496 }
4497}
4498
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    // Verifies that when an LSP completion item carries an explicit
    // `text_edit`, that edit's range and new text take precedence over both
    // `insert_text` and the item's `label`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Register a fake TypeScript server that advertises completion support so
    // `project.completions` issues a `textDocument/completion` request to it.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request first; the response comes from the
    // handler installed below.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with one item whose `text_edit` replaces the line's trailing
    // three characters ("fqn").
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion must use the text and range from `text_edit`,
    // not `insert_text` ("insertText") or the label ("labelText").
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4582
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies handling of `CompletionList.itemDefaults.edit_range`: when an
    // item omits its own `text_edit`, the list-level default edit range is
    // used, with `text_edit_text` (if present) or else the item's `label`
    // supplying the inserted text.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    // Expectation: `text_edit_text` wins over the label, and the default
    // edit_range (the last three characters, "fqn") is used as replace range.
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    // Expectation: the label is used as the new text (note the `insert_text`
    // of "irrelevant" is ignored here), still with the default edit_range.
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4720
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies completion behavior when the server provides neither a
    // per-item `text_edit` nor a list-level default edit range: the replace
    // range is derived locally (the assertions show it covering the token
    // adjacent to the cursor), and the text falls back to `insert_text`,
    // then the label.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    // Expectation: `insert_text` is used, replacing "fqn" (last 3 chars).
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    // The cursor sits just before the closing quote; the label is used as the
    // new text and the replace range covers "cmp" (the 3 chars before the
    // cursor).
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4827
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    // Verifies that carriage returns ("\r" and "\r\n") in server-provided
    // completion text are normalized to plain "\n" before the completion is
    // surfaced to the buffer.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The insert_text mixes a bare "\r" and a Windows "\r\n".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both line-ending variants become "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4896
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // End-to-end flow for code actions backed by a *command* rather than an
    // edit: list actions -> resolve (which attaches the command) ->
    // executeCommand -> server-initiated `workspace/applyEdit` -> the project
    // transaction reflects the applied edit and is undoable.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The fake server must advertise both code-action resolve support and the
    // command it will later be asked to execute.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`, i.e. resolvable).
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // The edit inserts "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
5040
5041#[gpui::test]
5042async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
5043 init_test(cx);
5044 let fs = FakeFs::new(cx.background_executor.clone());
5045 let expected_contents = "content";
5046 fs.as_fake()
5047 .insert_tree(
5048 "/root",
5049 json!({
5050 "test.txt": expected_contents
5051 }),
5052 )
5053 .await;
5054
5055 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
5056
5057 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
5058 let worktree = project.worktrees(cx).next().unwrap();
5059 let entry_id = worktree
5060 .read(cx)
5061 .entry_for_path(rel_path("test.txt"))
5062 .unwrap()
5063 .id;
5064 (worktree, entry_id)
5065 });
5066 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5067 let _result = project
5068 .update(cx, |project, cx| {
5069 project.rename_entry(
5070 entry_id,
5071 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
5072 cx,
5073 )
5074 })
5075 .await
5076 .unwrap();
5077 worktree.read_with(cx, |worktree, _| {
5078 assert!(
5079 worktree.entry_for_path(rel_path("test.txt")).is_none(),
5080 "Old file should have been removed"
5081 );
5082 assert!(
5083 worktree
5084 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5085 .is_some(),
5086 "Whole directory hierarchy and the new file should have been created"
5087 );
5088 });
5089 assert_eq!(
5090 worktree
5091 .update(cx, |worktree, cx| {
5092 worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
5093 })
5094 .await
5095 .unwrap()
5096 .text,
5097 expected_contents,
5098 "Moved file's contents should be preserved"
5099 );
5100
5101 let entry_id = worktree.read_with(cx, |worktree, _| {
5102 worktree
5103 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5104 .unwrap()
5105 .id
5106 });
5107
5108 let _result = project
5109 .update(cx, |project, cx| {
5110 project.rename_entry(
5111 entry_id,
5112 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
5113 cx,
5114 )
5115 })
5116 .await
5117 .unwrap();
5118 worktree.read_with(cx, |worktree, _| {
5119 assert!(
5120 worktree.entry_for_path(rel_path("test.txt")).is_none(),
5121 "First file should not reappear"
5122 );
5123 assert!(
5124 worktree
5125 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
5126 .is_none(),
5127 "Old file should have been removed"
5128 );
5129 assert!(
5130 worktree
5131 .entry_for_path(rel_path("dir1/dir2/test.txt"))
5132 .is_some(),
5133 "No error should have occurred after moving into existing directory"
5134 );
5135 });
5136 assert_eq!(
5137 worktree
5138 .update(cx, |worktree, cx| {
5139 worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
5140 })
5141 .await
5142 .unwrap()
5143 .text,
5144 expected_contents,
5145 "Moved file's contents should be preserved"
5146 );
5147}
5148
5149#[gpui::test(iterations = 10)]
5150async fn test_save_file(cx: &mut gpui::TestAppContext) {
5151 init_test(cx);
5152
5153 let fs = FakeFs::new(cx.executor());
5154 fs.insert_tree(
5155 path!("/dir"),
5156 json!({
5157 "file1": "the old contents",
5158 }),
5159 )
5160 .await;
5161
5162 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5163 let buffer = project
5164 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5165 .await
5166 .unwrap();
5167 buffer.update(cx, |buffer, cx| {
5168 assert_eq!(buffer.text(), "the old contents");
5169 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5170 });
5171
5172 project
5173 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5174 .await
5175 .unwrap();
5176
5177 let new_text = fs
5178 .load(Path::new(path!("/dir/file1")))
5179 .await
5180 .unwrap()
5181 .replace("\r\n", "\n");
5182 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5183}
5184
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    // Regression test: saving an untitled buffer to a path with a recognized
    // extension (`.rs`) must start the matching language server and register
    // the buffer with it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer. With no file (and thus no language) yet,
    // registering it with language servers should be a no-op.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer under a `.rs` path inside the project's worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer is associated with the new server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
5264
5265#[gpui::test(iterations = 30)]
5266async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
5267 init_test(cx);
5268
5269 let fs = FakeFs::new(cx.executor());
5270 fs.insert_tree(
5271 path!("/dir"),
5272 json!({
5273 "file1": "the original contents",
5274 }),
5275 )
5276 .await;
5277
5278 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5279 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5280 let buffer = project
5281 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5282 .await
5283 .unwrap();
5284
5285 // Change the buffer's file on disk, and then wait for the file change
5286 // to be detected by the worktree, so that the buffer starts reloading.
5287 fs.save(
5288 path!("/dir/file1").as_ref(),
5289 &"the first contents".into(),
5290 Default::default(),
5291 )
5292 .await
5293 .unwrap();
5294 worktree.next_event(cx).await;
5295
5296 // Change the buffer's file again. Depending on the random seed, the
5297 // previous file change may still be in progress.
5298 fs.save(
5299 path!("/dir/file1").as_ref(),
5300 &"the second contents".into(),
5301 Default::default(),
5302 )
5303 .await
5304 .unwrap();
5305 worktree.next_event(cx).await;
5306
5307 cx.executor().run_until_parked();
5308 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5309 buffer.read_with(cx, |buffer, _| {
5310 assert_eq!(buffer.text(), on_disk_text);
5311 assert!(!buffer.is_dirty(), "buffer should not be dirty");
5312 assert!(!buffer.has_conflict(), "buffer should not be dirty");
5313 });
5314}
5315
5316#[gpui::test(iterations = 30)]
5317async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
5318 init_test(cx);
5319
5320 let fs = FakeFs::new(cx.executor());
5321 fs.insert_tree(
5322 path!("/dir"),
5323 json!({
5324 "file1": "the original contents",
5325 }),
5326 )
5327 .await;
5328
5329 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5330 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5331 let buffer = project
5332 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5333 .await
5334 .unwrap();
5335
5336 // Change the buffer's file on disk, and then wait for the file change
5337 // to be detected by the worktree, so that the buffer starts reloading.
5338 fs.save(
5339 path!("/dir/file1").as_ref(),
5340 &"the first contents".into(),
5341 Default::default(),
5342 )
5343 .await
5344 .unwrap();
5345 worktree.next_event(cx).await;
5346
5347 cx.executor()
5348 .spawn(cx.executor().simulate_random_delay())
5349 .await;
5350
5351 // Perform a noop edit, causing the buffer's version to increase.
5352 buffer.update(cx, |buffer, cx| {
5353 buffer.edit([(0..0, " ")], None, cx);
5354 buffer.undo(cx);
5355 });
5356
5357 cx.executor().run_until_parked();
5358 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5359 buffer.read_with(cx, |buffer, _| {
5360 let buffer_text = buffer.text();
5361 if buffer_text == on_disk_text {
5362 assert!(
5363 !buffer.is_dirty() && !buffer.has_conflict(),
5364 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5365 );
5366 }
5367 // If the file change occurred while the buffer was processing the first
5368 // change, the buffer will be in a conflicting state.
5369 else {
5370 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5371 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5372 }
5373 });
5374}
5375
5376#[gpui::test]
5377async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5378 init_test(cx);
5379
5380 let fs = FakeFs::new(cx.executor());
5381 fs.insert_tree(
5382 path!("/dir"),
5383 json!({
5384 "file1": "the old contents",
5385 }),
5386 )
5387 .await;
5388
5389 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5390 let buffer = project
5391 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5392 .await
5393 .unwrap();
5394 buffer.update(cx, |buffer, cx| {
5395 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5396 });
5397
5398 project
5399 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5400 .await
5401 .unwrap();
5402
5403 let new_text = fs
5404 .load(Path::new(path!("/dir/file1")))
5405 .await
5406 .unwrap()
5407 .replace("\r\n", "\n");
5408 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5409}
5410
5411#[gpui::test]
5412async fn test_save_as(cx: &mut gpui::TestAppContext) {
5413 init_test(cx);
5414
5415 let fs = FakeFs::new(cx.executor());
5416 fs.insert_tree("/dir", json!({})).await;
5417
5418 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5419
5420 let languages = project.update(cx, |project, _| project.languages().clone());
5421 languages.add(rust_lang());
5422
5423 let buffer = project.update(cx, |project, cx| {
5424 project.create_local_buffer("", None, false, cx)
5425 });
5426 buffer.update(cx, |buffer, cx| {
5427 buffer.edit([(0..0, "abc")], None, cx);
5428 assert!(buffer.is_dirty());
5429 assert!(!buffer.has_conflict());
5430 assert_eq!(buffer.language().unwrap().name(), "Plain Text");
5431 });
5432 project
5433 .update(cx, |project, cx| {
5434 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
5435 let path = ProjectPath {
5436 worktree_id,
5437 path: rel_path("file1.rs").into(),
5438 };
5439 project.save_buffer_as(buffer.clone(), path, cx)
5440 })
5441 .await
5442 .unwrap();
5443 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
5444
5445 cx.executor().run_until_parked();
5446 buffer.update(cx, |buffer, cx| {
5447 assert_eq!(
5448 buffer.file().unwrap().full_path(cx),
5449 Path::new("dir/file1.rs")
5450 );
5451 assert!(!buffer.is_dirty());
5452 assert!(!buffer.has_conflict());
5453 assert_eq!(buffer.language().unwrap().name(), "Rust");
5454 });
5455
5456 let opened_buffer = project
5457 .update(cx, |project, cx| {
5458 project.open_local_buffer("/dir/file1.rs", cx)
5459 })
5460 .await
5461 .unwrap();
5462 assert_eq!(opened_buffer, buffer);
5463}
5464
#[gpui::test]
async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
    // "Save as" from an existing file to a new path must re-point the buffer
    // at the new file while leaving the original file on disk untouched.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // NOTE(review): the tree is inserted after the project is created —
    // presumably the FakeFs event stream lets the worktree pick it up; the
    // rest of the test relies on `data_a.txt` being openable afterwards.
    fs.insert_tree(
        path!("/dir"),
        json!({
            "data_a.txt": "data about a"
        }),
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    // Make the buffer differ from disk ("data about a" -> "data about b").
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(11..12, "b")], None, cx);
    });

    // Save buffer's contents as a new file and confirm that the buffer's now
    // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
    // file associated with the buffer has now been updated to `data_b.txt`
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let new_path = ProjectPath {
                worktree_id,
                path: rel_path("data_b.txt").into(),
            };

            project.save_buffer_as(buffer.clone(), new_path, cx)
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_b.txt")
        )
    });

    // Open the original `data_a.txt` file, confirming that its contents are
    // unchanged and the resulting buffer's associated file is `data_a.txt`.
    let original_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    original_buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "data about a");
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_a.txt")
        )
    });
}
5531
// End-to-end check that filesystem renames and deletions are reflected both
// in the local worktree (preserving entry ids and open-buffer file
// associations) and, via the captured update stream, in a remote replica of
// the same worktree.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Returns the stable worktree entry id for a relative path, panicking if
    // the entry does not exist.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree would send to collaborators so
    // that they can be replayed into the remote replica at the end.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    // None of the buffers have been edited, so none should be dirty.
    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // After the rescan, the local worktree reflects the new layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Renamed/moved entries keep their original entry ids.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files across renames; the deleted file's
    // buffer keeps its old path but reports `DiskState::Deleted`.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5699
5700#[cfg(target_os = "linux")]
5701#[gpui::test(retries = 5)]
5702async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
5703 init_test(cx);
5704 cx.executor().allow_parking();
5705
5706 let dir = TempTree::new(json!({}));
5707 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
5708 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5709
5710 tree.flush_fs_events(cx).await;
5711
5712 let repro_dir = dir.path().join("repro");
5713 std::fs::create_dir(&repro_dir).unwrap();
5714 tree.flush_fs_events(cx).await;
5715
5716 cx.update(|cx| {
5717 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5718 });
5719
5720 std::fs::remove_dir_all(&repro_dir).unwrap();
5721 tree.flush_fs_events(cx).await;
5722
5723 cx.update(|cx| {
5724 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
5725 });
5726
5727 std::fs::create_dir(&repro_dir).unwrap();
5728 tree.flush_fs_events(cx).await;
5729
5730 cx.update(|cx| {
5731 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5732 });
5733
5734 std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
5735 tree.flush_fs_events(cx).await;
5736
5737 cx.update(|cx| {
5738 assert!(
5739 tree.read(cx)
5740 .entry_for_path(rel_path("repro/repro-marker"))
5741 .is_some()
5742 );
5743 });
5744}
5745
5746#[gpui::test(iterations = 10)]
5747async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
5748 init_test(cx);
5749
5750 let fs = FakeFs::new(cx.executor());
5751 fs.insert_tree(
5752 path!("/dir"),
5753 json!({
5754 "a": {
5755 "file1": "",
5756 }
5757 }),
5758 )
5759 .await;
5760
5761 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
5762 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5763 let tree_id = tree.update(cx, |tree, _| tree.id());
5764
5765 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5766 project.update(cx, |project, cx| {
5767 let tree = project.worktrees(cx).next().unwrap();
5768 tree.read(cx)
5769 .entry_for_path(rel_path(path))
5770 .unwrap_or_else(|| panic!("no entry for path {}", path))
5771 .id
5772 })
5773 };
5774
5775 let dir_id = id_for_path("a", cx);
5776 let file_id = id_for_path("a/file1", cx);
5777 let buffer = project
5778 .update(cx, |p, cx| {
5779 p.open_buffer((tree_id, rel_path("a/file1")), cx)
5780 })
5781 .await
5782 .unwrap();
5783 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5784
5785 project
5786 .update(cx, |project, cx| {
5787 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
5788 })
5789 .unwrap()
5790 .await
5791 .into_included()
5792 .unwrap();
5793 cx.executor().run_until_parked();
5794
5795 assert_eq!(id_for_path("b", cx), dir_id);
5796 assert_eq!(id_for_path("b/file1", cx), file_id);
5797 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5798}
5799
5800#[gpui::test]
5801async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5802 init_test(cx);
5803
5804 let fs = FakeFs::new(cx.executor());
5805 fs.insert_tree(
5806 "/dir",
5807 json!({
5808 "a.txt": "a-contents",
5809 "b.txt": "b-contents",
5810 }),
5811 )
5812 .await;
5813
5814 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5815
5816 // Spawn multiple tasks to open paths, repeating some paths.
5817 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5818 (
5819 p.open_local_buffer("/dir/a.txt", cx),
5820 p.open_local_buffer("/dir/b.txt", cx),
5821 p.open_local_buffer("/dir/a.txt", cx),
5822 )
5823 });
5824
5825 let buffer_a_1 = buffer_a_1.await.unwrap();
5826 let buffer_a_2 = buffer_a_2.await.unwrap();
5827 let buffer_b = buffer_b.await.unwrap();
5828 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5829 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5830
5831 // There is only one buffer per path.
5832 let buffer_a_id = buffer_a_1.entity_id();
5833 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5834
5835 // Open the same path again while it is still open.
5836 drop(buffer_a_1);
5837 let buffer_a_3 = project
5838 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5839 .await
5840 .unwrap();
5841
5842 // There's still only one buffer per path.
5843 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5844}
5845
5846#[gpui::test]
5847async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5848 init_test(cx);
5849
5850 let fs = FakeFs::new(cx.executor());
5851 fs.insert_tree(
5852 path!("/dir"),
5853 json!({
5854 "file1": "abc",
5855 "file2": "def",
5856 "file3": "ghi",
5857 }),
5858 )
5859 .await;
5860
5861 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5862
5863 let buffer1 = project
5864 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5865 .await
5866 .unwrap();
5867 let events = Arc::new(Mutex::new(Vec::new()));
5868
5869 // initially, the buffer isn't dirty.
5870 buffer1.update(cx, |buffer, cx| {
5871 cx.subscribe(&buffer1, {
5872 let events = events.clone();
5873 move |_, _, event, _| match event {
5874 BufferEvent::Operation { .. } => {}
5875 _ => events.lock().push(event.clone()),
5876 }
5877 })
5878 .detach();
5879
5880 assert!(!buffer.is_dirty());
5881 assert!(events.lock().is_empty());
5882
5883 buffer.edit([(1..2, "")], None, cx);
5884 });
5885
5886 // after the first edit, the buffer is dirty, and emits a dirtied event.
5887 buffer1.update(cx, |buffer, cx| {
5888 assert!(buffer.text() == "ac");
5889 assert!(buffer.is_dirty());
5890 assert_eq!(
5891 *events.lock(),
5892 &[
5893 language::BufferEvent::Edited { is_local: true },
5894 language::BufferEvent::DirtyChanged
5895 ]
5896 );
5897 events.lock().clear();
5898 buffer.did_save(
5899 buffer.version(),
5900 buffer.file().unwrap().disk_state().mtime(),
5901 cx,
5902 );
5903 });
5904
5905 // after saving, the buffer is not dirty, and emits a saved event.
5906 buffer1.update(cx, |buffer, cx| {
5907 assert!(!buffer.is_dirty());
5908 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
5909 events.lock().clear();
5910
5911 buffer.edit([(1..1, "B")], None, cx);
5912 buffer.edit([(2..2, "D")], None, cx);
5913 });
5914
5915 // after editing again, the buffer is dirty, and emits another dirty event.
5916 buffer1.update(cx, |buffer, cx| {
5917 assert!(buffer.text() == "aBDc");
5918 assert!(buffer.is_dirty());
5919 assert_eq!(
5920 *events.lock(),
5921 &[
5922 language::BufferEvent::Edited { is_local: true },
5923 language::BufferEvent::DirtyChanged,
5924 language::BufferEvent::Edited { is_local: true },
5925 ],
5926 );
5927 events.lock().clear();
5928
5929 // After restoring the buffer to its previously-saved state,
5930 // the buffer is not considered dirty anymore.
5931 buffer.edit([(1..3, "")], None, cx);
5932 assert!(buffer.text() == "ac");
5933 assert!(!buffer.is_dirty());
5934 });
5935
5936 assert_eq!(
5937 *events.lock(),
5938 &[
5939 language::BufferEvent::Edited { is_local: true },
5940 language::BufferEvent::DirtyChanged
5941 ]
5942 );
5943
5944 // When a file is deleted, it is not considered dirty.
5945 let events = Arc::new(Mutex::new(Vec::new()));
5946 let buffer2 = project
5947 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5948 .await
5949 .unwrap();
5950 buffer2.update(cx, |_, cx| {
5951 cx.subscribe(&buffer2, {
5952 let events = events.clone();
5953 move |_, _, event, _| match event {
5954 BufferEvent::Operation { .. } => {}
5955 _ => events.lock().push(event.clone()),
5956 }
5957 })
5958 .detach();
5959 });
5960
5961 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
5962 .await
5963 .unwrap();
5964 cx.executor().run_until_parked();
5965 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5966 assert_eq!(
5967 mem::take(&mut *events.lock()),
5968 &[language::BufferEvent::FileHandleChanged]
5969 );
5970
5971 // Buffer becomes dirty when edited.
5972 buffer2.update(cx, |buffer, cx| {
5973 buffer.edit([(2..3, "")], None, cx);
5974 assert_eq!(buffer.is_dirty(), true);
5975 });
5976 assert_eq!(
5977 mem::take(&mut *events.lock()),
5978 &[
5979 language::BufferEvent::Edited { is_local: true },
5980 language::BufferEvent::DirtyChanged
5981 ]
5982 );
5983
5984 // Buffer becomes clean again when all of its content is removed, because
5985 // the file was deleted.
5986 buffer2.update(cx, |buffer, cx| {
5987 buffer.edit([(0..2, "")], None, cx);
5988 assert_eq!(buffer.is_empty(), true);
5989 assert_eq!(buffer.is_dirty(), false);
5990 });
5991 assert_eq!(
5992 *events.lock(),
5993 &[
5994 language::BufferEvent::Edited { is_local: true },
5995 language::BufferEvent::DirtyChanged
5996 ]
5997 );
5998
5999 // When a file is already dirty when deleted, we don't emit a Dirtied event.
6000 let events = Arc::new(Mutex::new(Vec::new()));
6001 let buffer3 = project
6002 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
6003 .await
6004 .unwrap();
6005 buffer3.update(cx, |_, cx| {
6006 cx.subscribe(&buffer3, {
6007 let events = events.clone();
6008 move |_, _, event, _| match event {
6009 BufferEvent::Operation { .. } => {}
6010 _ => events.lock().push(event.clone()),
6011 }
6012 })
6013 .detach();
6014 });
6015
6016 buffer3.update(cx, |buffer, cx| {
6017 buffer.edit([(0..0, "x")], None, cx);
6018 });
6019 events.lock().clear();
6020 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
6021 .await
6022 .unwrap();
6023 cx.executor().run_until_parked();
6024 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
6025 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
6026}
6027
6028#[gpui::test]
6029async fn test_dirty_buffer_reloads_after_undo(cx: &mut gpui::TestAppContext) {
6030 init_test(cx);
6031
6032 let fs = FakeFs::new(cx.executor());
6033 fs.insert_tree(
6034 path!("/dir"),
6035 json!({
6036 "file.txt": "version 1",
6037 }),
6038 )
6039 .await;
6040
6041 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6042 let buffer = project
6043 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx))
6044 .await
6045 .unwrap();
6046
6047 buffer.read_with(cx, |buffer, _| {
6048 assert_eq!(buffer.text(), "version 1");
6049 assert!(!buffer.is_dirty());
6050 });
6051
6052 // User makes an edit, making the buffer dirty.
6053 buffer.update(cx, |buffer, cx| {
6054 buffer.edit([(0..0, "user edit: ")], None, cx);
6055 });
6056
6057 buffer.read_with(cx, |buffer, _| {
6058 assert!(buffer.is_dirty());
6059 assert_eq!(buffer.text(), "user edit: version 1");
6060 });
6061
6062 // External tool writes new content while buffer is dirty.
6063 // file_updated() updates the File but suppresses ReloadNeeded.
6064 fs.save(
6065 path!("/dir/file.txt").as_ref(),
6066 &"version 2 from external tool".into(),
6067 Default::default(),
6068 )
6069 .await
6070 .unwrap();
6071 cx.executor().run_until_parked();
6072
6073 buffer.read_with(cx, |buffer, _| {
6074 assert!(buffer.has_conflict());
6075 assert_eq!(buffer.text(), "user edit: version 1");
6076 });
6077
6078 // User undoes their edit. Buffer becomes clean, but disk has different
6079 // content. did_edit() detects the dirty->clean transition and checks if
6080 // disk changed while dirty. Since mtime differs from saved_mtime, it
6081 // emits ReloadNeeded.
6082 buffer.update(cx, |buffer, cx| {
6083 buffer.undo(cx);
6084 });
6085 cx.executor().run_until_parked();
6086
6087 buffer.read_with(cx, |buffer, _| {
6088 assert_eq!(
6089 buffer.text(),
6090 "version 2 from external tool",
6091 "buffer should reload from disk after undo makes it clean"
6092 );
6093 assert!(!buffer.is_dirty());
6094 });
6095}
6096
6097#[gpui::test]
6098async fn test_save_does_not_reload_when_format_removes_user_edits(cx: &mut gpui::TestAppContext) {
6099 init_test(cx);
6100
6101 let fs = FakeFs::new(cx.executor());
6102 fs.insert_tree(
6103 path!("/dir"),
6104 json!({
6105 "file.txt": "hello\nworld\n",
6106 }),
6107 )
6108 .await;
6109
6110 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6111 let buffer = project
6112 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx))
6113 .await
6114 .unwrap();
6115
6116 buffer.read_with(cx, |buffer, _| {
6117 assert_eq!(buffer.text(), "hello\nworld\n");
6118 assert!(!buffer.is_dirty());
6119 });
6120
6121 // User adds trailing whitespace — the only edit.
6122 buffer.update(cx, |buffer, cx| {
6123 buffer.edit([(5..5, " ")], None, cx);
6124 });
6125
6126 buffer.read_with(cx, |buffer, _| {
6127 assert_eq!(buffer.text(), "hello \nworld\n");
6128 assert!(buffer.is_dirty());
6129 });
6130
6131 // An external tool writes different content to the file while the buffer is dirty.
6132 fs.save(
6133 path!("/dir/file.txt").as_ref(),
6134 &"EXTERNAL CONTENT\n".into(),
6135 Default::default(),
6136 )
6137 .await
6138 .unwrap();
6139 cx.executor().run_until_parked();
6140
6141 // The buffer has a conflict: file mtime changed and buffer has unsaved edits.
6142 buffer.read_with(cx, |buffer, _| {
6143 assert!(buffer.is_dirty());
6144 assert!(buffer.has_conflict());
6145 assert_eq!(buffer.text(), "hello \nworld\n");
6146 assert_ne!(
6147 buffer.file().unwrap().disk_state().mtime(),
6148 buffer.saved_mtime(),
6149 "disk mtime should differ from saved mtime after external write"
6150 );
6151 });
6152
6153 // The user triggers a save, which formats the buffer then saves it.
6154 let buffers = [buffer.clone()].into_iter().collect::<HashSet<_>>();
6155 project
6156 .update(cx, |project, cx| {
6157 project.format(
6158 buffers,
6159 project::lsp_store::LspFormatTarget::Buffers,
6160 true,
6161 project::lsp_store::FormatTrigger::Save,
6162 cx,
6163 )
6164 })
6165 .await
6166 .unwrap();
6167
6168 // After formatting, the trailing whitespace was removed.
6169 // The buffer text should match what will be saved to disk.
6170 let formatted_text = buffer.read_with(cx, |buffer, _| buffer.text());
6171 assert_eq!(formatted_text, "hello\nworld\n");
6172 cx.executor().run_until_parked();
6173
6174 // The buffer text must still be the formatted text — not reloaded from disk.
6175 buffer.read_with(cx, |buffer, _| {
6176 assert_eq!(
6177 buffer.text(),
6178 formatted_text,
6179 "buffer should not have been reloaded from disk during format-on-save"
6180 );
6181 });
6182
6183 // Now save to disk.
6184 project
6185 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
6186 .await
6187 .unwrap();
6188 cx.executor().run_until_parked();
6189
6190 // After save, the buffer should be clean and match disk.
6191 buffer.read_with(cx, |buffer, _| {
6192 assert_eq!(buffer.text(), "hello\nworld\n");
6193 assert!(!buffer.is_dirty());
6194 assert!(!buffer.has_conflict());
6195 });
6196}
6197
6198#[gpui::test]
6199async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
6200 init_test(cx);
6201
6202 let (initial_contents, initial_offsets) =
6203 marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
6204 let fs = FakeFs::new(cx.executor());
6205 fs.insert_tree(
6206 path!("/dir"),
6207 json!({
6208 "the-file": initial_contents,
6209 }),
6210 )
6211 .await;
6212 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6213 let buffer = project
6214 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
6215 .await
6216 .unwrap();
6217
6218 let anchors = initial_offsets
6219 .iter()
6220 .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
6221 .collect::<Vec<_>>();
6222
6223 // Change the file on disk, adding two new lines of text, and removing
6224 // one line.
6225 buffer.update(cx, |buffer, _| {
6226 assert!(!buffer.is_dirty());
6227 assert!(!buffer.has_conflict());
6228 });
6229
6230 let (new_contents, new_offsets) =
6231 marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
6232 fs.save(
6233 path!("/dir/the-file").as_ref(),
6234 &new_contents.as_str().into(),
6235 LineEnding::Unix,
6236 )
6237 .await
6238 .unwrap();
6239
6240 // Because the buffer was not modified, it is reloaded from disk. Its
6241 // contents are edited according to the diff between the old and new
6242 // file contents.
6243 cx.executor().run_until_parked();
6244 buffer.update(cx, |buffer, _| {
6245 assert_eq!(buffer.text(), new_contents);
6246 assert!(!buffer.is_dirty());
6247 assert!(!buffer.has_conflict());
6248
6249 let anchor_offsets = anchors
6250 .iter()
6251 .map(|anchor| anchor.to_offset(&*buffer))
6252 .collect::<Vec<_>>();
6253 assert_eq!(anchor_offsets, new_offsets);
6254 });
6255
6256 // Modify the buffer
6257 buffer.update(cx, |buffer, cx| {
6258 buffer.edit([(0..0, " ")], None, cx);
6259 assert!(buffer.is_dirty());
6260 assert!(!buffer.has_conflict());
6261 });
6262
6263 // Change the file on disk again, adding blank lines to the beginning.
6264 fs.save(
6265 path!("/dir/the-file").as_ref(),
6266 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
6267 LineEnding::Unix,
6268 )
6269 .await
6270 .unwrap();
6271
6272 // Because the buffer is modified, it doesn't reload from disk, but is
6273 // marked as having a conflict.
6274 cx.executor().run_until_parked();
6275 buffer.update(cx, |buffer, _| {
6276 assert_eq!(buffer.text(), " ".to_string() + &new_contents);
6277 assert!(buffer.has_conflict());
6278 });
6279}
6280
6281#[gpui::test]
6282async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
6283 init_test(cx);
6284
6285 let fs = FakeFs::new(cx.executor());
6286 fs.insert_tree(
6287 path!("/dir"),
6288 json!({
6289 "file1": "a\nb\nc\n",
6290 "file2": "one\r\ntwo\r\nthree\r\n",
6291 }),
6292 )
6293 .await;
6294
6295 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6296 let buffer1 = project
6297 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
6298 .await
6299 .unwrap();
6300 let buffer2 = project
6301 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
6302 .await
6303 .unwrap();
6304
6305 buffer1.update(cx, |buffer, _| {
6306 assert_eq!(buffer.text(), "a\nb\nc\n");
6307 assert_eq!(buffer.line_ending(), LineEnding::Unix);
6308 });
6309 buffer2.update(cx, |buffer, _| {
6310 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
6311 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6312 });
6313
6314 // Change a file's line endings on disk from unix to windows. The buffer's
6315 // state updates correctly.
6316 fs.save(
6317 path!("/dir/file1").as_ref(),
6318 &"aaa\nb\nc\n".into(),
6319 LineEnding::Windows,
6320 )
6321 .await
6322 .unwrap();
6323 cx.executor().run_until_parked();
6324 buffer1.update(cx, |buffer, _| {
6325 assert_eq!(buffer.text(), "aaa\nb\nc\n");
6326 assert_eq!(buffer.line_ending(), LineEnding::Windows);
6327 });
6328
6329 // Save a file with windows line endings. The file is written correctly.
6330 buffer2.update(cx, |buffer, cx| {
6331 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
6332 });
6333 project
6334 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
6335 .await
6336 .unwrap();
6337 assert_eq!(
6338 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
6339 "one\r\ntwo\r\nthree\r\nfour\r\n",
6340 );
6341}
6342
// Verifies that pushed LSP diagnostics whose `relatedInformation` mirrors
// another diagnostic's range/message are grouped: each group gets one primary
// entry plus its hint entries sharing a `group_id`, and `diagnostic_group`
// retrieves a whole group by id.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Fixture: two diagnostic groups. "error 1" (warning) with one hint, and
    // "error 2" (error) with two hints. Each hint points back at its primary
    // via `related_information`, which is what drives the grouping.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the pushed diagnostics into the LSP store as if a server had sent
    // a publishDiagnostics notification.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics over the whole buffer: "error 2" and its hints share
    // group 0; "error 1" and its hint share group 1. Exactly one entry per
    // group is primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 in isolation: both hints plus the primary "error 2" entry.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 in isolation: the warning primary and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6602
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a worktree entry drives the LSP file-operation
    // protocol: a `workspace/willRenameFiles` request before the rename (whose
    // returned WorkspaceEdit the project is expected to honor), followed by a
    // `workspace/didRenameFiles` notification after the rename completes.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters the fake server registers for both willRename and
    // didRename: any `.rs` file, plus any folder.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server for this worktree.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename of one.rs -> three.rs. The returned task is awaited
    // only after the willRenameFiles handler below is installed, so the
    // request is answered rather than timing out.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will answer willRenameFiles with; it targets a
    // *different* file (two/two.rs) than the one being renamed.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit actually served by the handler; `OnceLock::set`
    // unwrapping also asserts the request is made at most once.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive the didRenameFiles
    // notification carrying the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6739
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end LSP symbol rename: `prepare_rename` resolves the renameable
    // range under the cursor, then `perform_rename` applies the server's
    // WorkspaceEdit, which touches multiple buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Server advertises rename with prepare support so that `prepare_rename`
    // issues a real `textDocument/prepareRename` request.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Prepare a rename at offset 7 (inside "ONE"); the handler is installed
    // after the request is initiated and answers with the symbol range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The LSP range comes back as buffer anchors; convert to byte offsets.
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Now perform the rename to "THREE". The fake server returns edits for
    // both one.rs (the definition) and two.rs (two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its applied edits: one entry
    // for the buffer we renamed in, one for the other affected buffer.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6880
6881#[gpui::test]
6882async fn test_search(cx: &mut gpui::TestAppContext) {
6883 init_test(cx);
6884
6885 let fs = FakeFs::new(cx.executor());
6886 fs.insert_tree(
6887 path!("/dir"),
6888 json!({
6889 "one.rs": "const ONE: usize = 1;",
6890 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6891 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6892 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6893 }),
6894 )
6895 .await;
6896 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6897 assert_eq!(
6898 search(
6899 &project,
6900 SearchQuery::text(
6901 "TWO",
6902 false,
6903 true,
6904 false,
6905 Default::default(),
6906 Default::default(),
6907 false,
6908 None
6909 )
6910 .unwrap(),
6911 cx
6912 )
6913 .await
6914 .unwrap(),
6915 HashMap::from_iter([
6916 (path!("dir/two.rs").to_string(), vec![6..9]),
6917 (path!("dir/three.rs").to_string(), vec![37..40])
6918 ])
6919 );
6920
6921 let buffer_4 = project
6922 .update(cx, |project, cx| {
6923 project.open_local_buffer(path!("/dir/four.rs"), cx)
6924 })
6925 .await
6926 .unwrap();
6927 buffer_4.update(cx, |buffer, cx| {
6928 let text = "two::TWO";
6929 buffer.edit([(20..28, text), (31..43, text)], None, cx);
6930 });
6931
6932 assert_eq!(
6933 search(
6934 &project,
6935 SearchQuery::text(
6936 "TWO",
6937 false,
6938 true,
6939 false,
6940 Default::default(),
6941 Default::default(),
6942 false,
6943 None,
6944 )
6945 .unwrap(),
6946 cx
6947 )
6948 .await
6949 .unwrap(),
6950 HashMap::from_iter([
6951 (path!("dir/two.rs").to_string(), vec![6..9]),
6952 (path!("dir/three.rs").to_string(), vec![37..40]),
6953 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
6954 ])
6955 );
6956}
6957
6958#[gpui::test]
6959async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
6960 init_test(cx);
6961
6962 let search_query = "file";
6963
6964 let fs = FakeFs::new(cx.executor());
6965 fs.insert_tree(
6966 path!("/dir"),
6967 json!({
6968 "one.rs": r#"// Rust file one"#,
6969 "one.ts": r#"// TypeScript file one"#,
6970 "two.rs": r#"// Rust file two"#,
6971 "two.ts": r#"// TypeScript file two"#,
6972 }),
6973 )
6974 .await;
6975 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6976
6977 assert!(
6978 search(
6979 &project,
6980 SearchQuery::text(
6981 search_query,
6982 false,
6983 true,
6984 false,
6985 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6986 Default::default(),
6987 false,
6988 None
6989 )
6990 .unwrap(),
6991 cx
6992 )
6993 .await
6994 .unwrap()
6995 .is_empty(),
6996 "If no inclusions match, no files should be returned"
6997 );
6998
6999 assert_eq!(
7000 search(
7001 &project,
7002 SearchQuery::text(
7003 search_query,
7004 false,
7005 true,
7006 false,
7007 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
7008 Default::default(),
7009 false,
7010 None
7011 )
7012 .unwrap(),
7013 cx
7014 )
7015 .await
7016 .unwrap(),
7017 HashMap::from_iter([
7018 (path!("dir/one.rs").to_string(), vec![8..12]),
7019 (path!("dir/two.rs").to_string(), vec![8..12]),
7020 ]),
7021 "Rust only search should give only Rust files"
7022 );
7023
7024 assert_eq!(
7025 search(
7026 &project,
7027 SearchQuery::text(
7028 search_query,
7029 false,
7030 true,
7031 false,
7032 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7033 .unwrap(),
7034 Default::default(),
7035 false,
7036 None,
7037 )
7038 .unwrap(),
7039 cx
7040 )
7041 .await
7042 .unwrap(),
7043 HashMap::from_iter([
7044 (path!("dir/one.ts").to_string(), vec![14..18]),
7045 (path!("dir/two.ts").to_string(), vec![14..18]),
7046 ]),
7047 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
7048 );
7049
7050 assert_eq!(
7051 search(
7052 &project,
7053 SearchQuery::text(
7054 search_query,
7055 false,
7056 true,
7057 false,
7058 PathMatcher::new(
7059 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7060 PathStyle::local()
7061 )
7062 .unwrap(),
7063 Default::default(),
7064 false,
7065 None,
7066 )
7067 .unwrap(),
7068 cx
7069 )
7070 .await
7071 .unwrap(),
7072 HashMap::from_iter([
7073 (path!("dir/two.ts").to_string(), vec![14..18]),
7074 (path!("dir/one.rs").to_string(), vec![8..12]),
7075 (path!("dir/one.ts").to_string(), vec![14..18]),
7076 (path!("dir/two.rs").to_string(), vec![8..12]),
7077 ]),
7078 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
7079 );
7080}
7081
7082#[gpui::test]
7083async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
7084 init_test(cx);
7085
7086 let search_query = "file";
7087
7088 let fs = FakeFs::new(cx.executor());
7089 fs.insert_tree(
7090 path!("/dir"),
7091 json!({
7092 "one.rs": r#"// Rust file one"#,
7093 "one.ts": r#"// TypeScript file one"#,
7094 "two.rs": r#"// Rust file two"#,
7095 "two.ts": r#"// TypeScript file two"#,
7096 }),
7097 )
7098 .await;
7099 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7100
7101 assert_eq!(
7102 search(
7103 &project,
7104 SearchQuery::text(
7105 search_query,
7106 false,
7107 true,
7108 false,
7109 Default::default(),
7110 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7111 false,
7112 None,
7113 )
7114 .unwrap(),
7115 cx
7116 )
7117 .await
7118 .unwrap(),
7119 HashMap::from_iter([
7120 (path!("dir/one.rs").to_string(), vec![8..12]),
7121 (path!("dir/one.ts").to_string(), vec![14..18]),
7122 (path!("dir/two.rs").to_string(), vec![8..12]),
7123 (path!("dir/two.ts").to_string(), vec![14..18]),
7124 ]),
7125 "If no exclusions match, all files should be returned"
7126 );
7127
7128 assert_eq!(
7129 search(
7130 &project,
7131 SearchQuery::text(
7132 search_query,
7133 false,
7134 true,
7135 false,
7136 Default::default(),
7137 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
7138 false,
7139 None,
7140 )
7141 .unwrap(),
7142 cx
7143 )
7144 .await
7145 .unwrap(),
7146 HashMap::from_iter([
7147 (path!("dir/one.ts").to_string(), vec![14..18]),
7148 (path!("dir/two.ts").to_string(), vec![14..18]),
7149 ]),
7150 "Rust exclusion search should give only TypeScript files"
7151 );
7152
7153 assert_eq!(
7154 search(
7155 &project,
7156 SearchQuery::text(
7157 search_query,
7158 false,
7159 true,
7160 false,
7161 Default::default(),
7162 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7163 .unwrap(),
7164 false,
7165 None,
7166 )
7167 .unwrap(),
7168 cx
7169 )
7170 .await
7171 .unwrap(),
7172 HashMap::from_iter([
7173 (path!("dir/one.rs").to_string(), vec![8..12]),
7174 (path!("dir/two.rs").to_string(), vec![8..12]),
7175 ]),
7176 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7177 );
7178
7179 assert!(
7180 search(
7181 &project,
7182 SearchQuery::text(
7183 search_query,
7184 false,
7185 true,
7186 false,
7187 Default::default(),
7188 PathMatcher::new(
7189 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7190 PathStyle::local(),
7191 )
7192 .unwrap(),
7193 false,
7194 None,
7195 )
7196 .unwrap(),
7197 cx
7198 )
7199 .await
7200 .unwrap()
7201 .is_empty(),
7202 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7203 );
7204}
7205
7206#[gpui::test]
7207async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
7208 init_test(cx);
7209
7210 let search_query = "file";
7211
7212 let fs = FakeFs::new(cx.executor());
7213 fs.insert_tree(
7214 path!("/dir"),
7215 json!({
7216 "one.rs": r#"// Rust file one"#,
7217 "one.ts": r#"// TypeScript file one"#,
7218 "two.rs": r#"// Rust file two"#,
7219 "two.ts": r#"// TypeScript file two"#,
7220 }),
7221 )
7222 .await;
7223
7224 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7225 let path_style = PathStyle::local();
7226 let _buffer = project.update(cx, |project, cx| {
7227 project.create_local_buffer("file", None, false, cx)
7228 });
7229
7230 assert_eq!(
7231 search(
7232 &project,
7233 SearchQuery::text(
7234 search_query,
7235 false,
7236 true,
7237 false,
7238 Default::default(),
7239 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
7240 false,
7241 None,
7242 )
7243 .unwrap(),
7244 cx
7245 )
7246 .await
7247 .unwrap(),
7248 HashMap::from_iter([
7249 (path!("dir/one.rs").to_string(), vec![8..12]),
7250 (path!("dir/one.ts").to_string(), vec![14..18]),
7251 (path!("dir/two.rs").to_string(), vec![8..12]),
7252 (path!("dir/two.ts").to_string(), vec![14..18]),
7253 ]),
7254 "If no exclusions match, all files should be returned"
7255 );
7256
7257 assert_eq!(
7258 search(
7259 &project,
7260 SearchQuery::text(
7261 search_query,
7262 false,
7263 true,
7264 false,
7265 Default::default(),
7266 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
7267 false,
7268 None,
7269 )
7270 .unwrap(),
7271 cx
7272 )
7273 .await
7274 .unwrap(),
7275 HashMap::from_iter([
7276 (path!("dir/one.ts").to_string(), vec![14..18]),
7277 (path!("dir/two.ts").to_string(), vec![14..18]),
7278 ]),
7279 "Rust exclusion search should give only TypeScript files"
7280 );
7281
7282 assert_eq!(
7283 search(
7284 &project,
7285 SearchQuery::text(
7286 search_query,
7287 false,
7288 true,
7289 false,
7290 Default::default(),
7291 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
7292 false,
7293 None,
7294 )
7295 .unwrap(),
7296 cx
7297 )
7298 .await
7299 .unwrap(),
7300 HashMap::from_iter([
7301 (path!("dir/one.rs").to_string(), vec![8..12]),
7302 (path!("dir/two.rs").to_string(), vec![8..12]),
7303 ]),
7304 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
7305 );
7306
7307 assert!(
7308 search(
7309 &project,
7310 SearchQuery::text(
7311 search_query,
7312 false,
7313 true,
7314 false,
7315 Default::default(),
7316 PathMatcher::new(
7317 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
7318 PathStyle::local(),
7319 )
7320 .unwrap(),
7321 false,
7322 None,
7323 )
7324 .unwrap(),
7325 cx
7326 )
7327 .await
7328 .unwrap()
7329 .is_empty(),
7330 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
7331 );
7332}
7333
7334#[gpui::test]
7335async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
7336 init_test(cx);
7337
7338 let search_query = "file";
7339
7340 let fs = FakeFs::new(cx.executor());
7341 fs.insert_tree(
7342 path!("/dir"),
7343 json!({
7344 "one.rs": r#"// Rust file one"#,
7345 "one.ts": r#"// TypeScript file one"#,
7346 "two.rs": r#"// Rust file two"#,
7347 "two.ts": r#"// TypeScript file two"#,
7348 }),
7349 )
7350 .await;
7351 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7352 assert!(
7353 search(
7354 &project,
7355 SearchQuery::text(
7356 search_query,
7357 false,
7358 true,
7359 false,
7360 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7361 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7362 false,
7363 None,
7364 )
7365 .unwrap(),
7366 cx
7367 )
7368 .await
7369 .unwrap()
7370 .is_empty(),
7371 "If both no exclusions and inclusions match, exclusions should win and return nothing"
7372 );
7373
7374 assert!(
7375 search(
7376 &project,
7377 SearchQuery::text(
7378 search_query,
7379 false,
7380 true,
7381 false,
7382 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7383 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7384 false,
7385 None,
7386 )
7387 .unwrap(),
7388 cx
7389 )
7390 .await
7391 .unwrap()
7392 .is_empty(),
7393 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
7394 );
7395
7396 assert!(
7397 search(
7398 &project,
7399 SearchQuery::text(
7400 search_query,
7401 false,
7402 true,
7403 false,
7404 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7405 .unwrap(),
7406 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7407 .unwrap(),
7408 false,
7409 None,
7410 )
7411 .unwrap(),
7412 cx
7413 )
7414 .await
7415 .unwrap()
7416 .is_empty(),
7417 "Non-matching inclusions and exclusions should not change that."
7418 );
7419
7420 assert_eq!(
7421 search(
7422 &project,
7423 SearchQuery::text(
7424 search_query,
7425 false,
7426 true,
7427 false,
7428 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7429 .unwrap(),
7430 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
7431 .unwrap(),
7432 false,
7433 None,
7434 )
7435 .unwrap(),
7436 cx
7437 )
7438 .await
7439 .unwrap(),
7440 HashMap::from_iter([
7441 (path!("dir/one.ts").to_string(), vec![14..18]),
7442 (path!("dir/two.ts").to_string(), vec![14..18]),
7443 ]),
7444 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
7445 );
7446}
7447
7448#[gpui::test]
7449async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
7450 init_test(cx);
7451
7452 let fs = FakeFs::new(cx.executor());
7453 fs.insert_tree(
7454 path!("/worktree-a"),
7455 json!({
7456 "haystack.rs": r#"// NEEDLE"#,
7457 "haystack.ts": r#"// NEEDLE"#,
7458 }),
7459 )
7460 .await;
7461 fs.insert_tree(
7462 path!("/worktree-b"),
7463 json!({
7464 "haystack.rs": r#"// NEEDLE"#,
7465 "haystack.ts": r#"// NEEDLE"#,
7466 }),
7467 )
7468 .await;
7469
7470 let path_style = PathStyle::local();
7471 let project = Project::test(
7472 fs.clone(),
7473 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
7474 cx,
7475 )
7476 .await;
7477
7478 assert_eq!(
7479 search(
7480 &project,
7481 SearchQuery::text(
7482 "NEEDLE",
7483 false,
7484 true,
7485 false,
7486 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
7487 Default::default(),
7488 true,
7489 None,
7490 )
7491 .unwrap(),
7492 cx
7493 )
7494 .await
7495 .unwrap(),
7496 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
7497 "should only return results from included worktree"
7498 );
7499 assert_eq!(
7500 search(
7501 &project,
7502 SearchQuery::text(
7503 "NEEDLE",
7504 false,
7505 true,
7506 false,
7507 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
7508 Default::default(),
7509 true,
7510 None,
7511 )
7512 .unwrap(),
7513 cx
7514 )
7515 .await
7516 .unwrap(),
7517 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
7518 "should only return results from included worktree"
7519 );
7520
7521 assert_eq!(
7522 search(
7523 &project,
7524 SearchQuery::text(
7525 "NEEDLE",
7526 false,
7527 true,
7528 false,
7529 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
7530 Default::default(),
7531 false,
7532 None,
7533 )
7534 .unwrap(),
7535 cx
7536 )
7537 .await
7538 .unwrap(),
7539 HashMap::from_iter([
7540 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
7541 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
7542 ]),
7543 "should return results from both worktrees"
7544 );
7545}
7546
7547#[gpui::test]
7548async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
7549 init_test(cx);
7550
7551 let fs = FakeFs::new(cx.background_executor.clone());
7552 fs.insert_tree(
7553 path!("/dir"),
7554 json!({
7555 ".git": {},
7556 ".gitignore": "**/target\n/node_modules\n",
7557 "target": {
7558 "index.txt": "index_key:index_value"
7559 },
7560 "node_modules": {
7561 "eslint": {
7562 "index.ts": "const eslint_key = 'eslint value'",
7563 "package.json": r#"{ "some_key": "some value" }"#,
7564 },
7565 "prettier": {
7566 "index.ts": "const prettier_key = 'prettier value'",
7567 "package.json": r#"{ "other_key": "other value" }"#,
7568 },
7569 },
7570 "package.json": r#"{ "main_key": "main value" }"#,
7571 }),
7572 )
7573 .await;
7574 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7575
7576 let query = "key";
7577 assert_eq!(
7578 search(
7579 &project,
7580 SearchQuery::text(
7581 query,
7582 false,
7583 false,
7584 false,
7585 Default::default(),
7586 Default::default(),
7587 false,
7588 None,
7589 )
7590 .unwrap(),
7591 cx
7592 )
7593 .await
7594 .unwrap(),
7595 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
7596 "Only one non-ignored file should have the query"
7597 );
7598
7599 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7600 let path_style = PathStyle::local();
7601 assert_eq!(
7602 search(
7603 &project,
7604 SearchQuery::text(
7605 query,
7606 false,
7607 false,
7608 true,
7609 Default::default(),
7610 Default::default(),
7611 false,
7612 None,
7613 )
7614 .unwrap(),
7615 cx
7616 )
7617 .await
7618 .unwrap(),
7619 HashMap::from_iter([
7620 (path!("dir/package.json").to_string(), vec![8..11]),
7621 (path!("dir/target/index.txt").to_string(), vec![6..9]),
7622 (
7623 path!("dir/node_modules/prettier/package.json").to_string(),
7624 vec![9..12]
7625 ),
7626 (
7627 path!("dir/node_modules/prettier/index.ts").to_string(),
7628 vec![15..18]
7629 ),
7630 (
7631 path!("dir/node_modules/eslint/index.ts").to_string(),
7632 vec![13..16]
7633 ),
7634 (
7635 path!("dir/node_modules/eslint/package.json").to_string(),
7636 vec![8..11]
7637 ),
7638 ]),
7639 "Unrestricted search with ignored directories should find every file with the query"
7640 );
7641
7642 let files_to_include =
7643 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
7644 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
7645 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7646 assert_eq!(
7647 search(
7648 &project,
7649 SearchQuery::text(
7650 query,
7651 false,
7652 false,
7653 true,
7654 files_to_include,
7655 files_to_exclude,
7656 false,
7657 None,
7658 )
7659 .unwrap(),
7660 cx
7661 )
7662 .await
7663 .unwrap(),
7664 HashMap::from_iter([(
7665 path!("dir/node_modules/prettier/package.json").to_string(),
7666 vec![9..12]
7667 )]),
7668 "With search including ignored prettier directory and excluding TS files, only one file should be found"
7669 );
7670}
7671
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    // Searching non-ASCII text: case-sensitive queries stay plain text
    // queries, while case-insensitive non-ASCII queries are converted to
    // regex queries (asserted via `assert_matches` below).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive: only the lowercase occurrences match (one.rs and
    // three.rs); the ranges are byte offsets (Cyrillic chars are 2 bytes each).
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive over non-ASCII becomes a Regex query and matches both
    // the uppercase and lowercase spellings in every file.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A trailing '.' in the query must be treated literally, not as a regex
    // wildcard: only two.rs, which actually ends with a period, matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7754
7755#[gpui::test]
7756async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7757 init_test(cx);
7758
7759 let fs = FakeFs::new(cx.executor());
7760 fs.insert_tree(
7761 "/one/two",
7762 json!({
7763 "three": {
7764 "a.txt": "",
7765 "four": {}
7766 },
7767 "c.rs": ""
7768 }),
7769 )
7770 .await;
7771
7772 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7773 project
7774 .update(cx, |project, cx| {
7775 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7776 project.create_entry((id, rel_path("b..")), true, cx)
7777 })
7778 .await
7779 .unwrap()
7780 .into_included()
7781 .unwrap();
7782
7783 assert_eq!(
7784 fs.paths(true),
7785 vec![
7786 PathBuf::from(path!("/")),
7787 PathBuf::from(path!("/one")),
7788 PathBuf::from(path!("/one/two")),
7789 PathBuf::from(path!("/one/two/c.rs")),
7790 PathBuf::from(path!("/one/two/three")),
7791 PathBuf::from(path!("/one/two/three/a.txt")),
7792 PathBuf::from(path!("/one/two/three/b..")),
7793 PathBuf::from(path!("/one/two/three/four")),
7794 ]
7795 );
7796}
7797
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // A buffer can have several language servers attached. Hover requests
    // must be sent to every server that advertises hover capability (and only
    // those); empty/None responses are dropped from the combined result.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same language: the first three advertise
    // hover support, the last one does not.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler per server, keyed by server name:
    // - TypeScript/Tailwind answer with "<name> hover" content,
    // - ESLint answers None (valid but empty),
    // - NoHoverCapabilitiesServer panics if it is ever asked (it must not be,
    //   since it has no hover capability).
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Every hover-capable server must actually receive the request.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned content contribute to the result;
    // ESLint's None and the capability-less server are filtered out.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
7952
7953#[gpui::test]
7954async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
7955 init_test(cx);
7956
7957 let fs = FakeFs::new(cx.executor());
7958 fs.insert_tree(
7959 path!("/dir"),
7960 json!({
7961 "a.ts": "a",
7962 }),
7963 )
7964 .await;
7965
7966 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7967
7968 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7969 language_registry.add(typescript_lang());
7970 let mut fake_language_servers = language_registry.register_fake_lsp(
7971 "TypeScript",
7972 FakeLspAdapter {
7973 capabilities: lsp::ServerCapabilities {
7974 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
7975 ..lsp::ServerCapabilities::default()
7976 },
7977 ..FakeLspAdapter::default()
7978 },
7979 );
7980
7981 let (buffer, _handle) = project
7982 .update(cx, |p, cx| {
7983 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
7984 })
7985 .await
7986 .unwrap();
7987 cx.executor().run_until_parked();
7988
7989 let fake_server = fake_language_servers
7990 .next()
7991 .await
7992 .expect("failed to get the language server");
7993
7994 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
7995 move |_, _| async move {
7996 Ok(Some(lsp::Hover {
7997 contents: lsp::HoverContents::Array(vec![
7998 lsp::MarkedString::String("".to_string()),
7999 lsp::MarkedString::String(" ".to_string()),
8000 lsp::MarkedString::String("\n\n\n".to_string()),
8001 ]),
8002 range: None,
8003 }))
8004 },
8005 );
8006
8007 let hover_task = project.update(cx, |project, cx| {
8008 project.hover(&buffer, Point::new(0, 0), cx)
8009 });
8010 let () = request_handled
8011 .next()
8012 .await
8013 .expect("All hover requests should have been triggered");
8014 assert_eq!(
8015 Vec::<String>::new(),
8016 hover_task
8017 .await
8018 .into_iter()
8019 .flatten()
8020 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
8021 .sorted()
8022 .collect::<Vec<_>>(),
8023 "Empty hover parts should be ignored"
8024 );
8025}
8026
8027#[gpui::test]
8028async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
8029 init_test(cx);
8030
8031 let fs = FakeFs::new(cx.executor());
8032 fs.insert_tree(
8033 path!("/dir"),
8034 json!({
8035 "a.ts": "a",
8036 }),
8037 )
8038 .await;
8039
8040 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8041
8042 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8043 language_registry.add(typescript_lang());
8044 let mut fake_language_servers = language_registry.register_fake_lsp(
8045 "TypeScript",
8046 FakeLspAdapter {
8047 capabilities: lsp::ServerCapabilities {
8048 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8049 ..lsp::ServerCapabilities::default()
8050 },
8051 ..FakeLspAdapter::default()
8052 },
8053 );
8054
8055 let (buffer, _handle) = project
8056 .update(cx, |p, cx| {
8057 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
8058 })
8059 .await
8060 .unwrap();
8061 cx.executor().run_until_parked();
8062
8063 let fake_server = fake_language_servers
8064 .next()
8065 .await
8066 .expect("failed to get the language server");
8067
8068 let mut request_handled = fake_server
8069 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
8070 Ok(Some(vec![
8071 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
8072 title: "organize imports".to_string(),
8073 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
8074 ..lsp::CodeAction::default()
8075 }),
8076 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
8077 title: "fix code".to_string(),
8078 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
8079 ..lsp::CodeAction::default()
8080 }),
8081 ]))
8082 });
8083
8084 let code_actions_task = project.update(cx, |project, cx| {
8085 project.code_actions(
8086 &buffer,
8087 0..buffer.read(cx).len(),
8088 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
8089 cx,
8090 )
8091 });
8092
8093 let () = request_handled
8094 .next()
8095 .await
8096 .expect("The code action request should have been triggered");
8097
8098 let code_actions = code_actions_task.await.unwrap().unwrap();
8099 assert_eq!(code_actions.len(), 1);
8100 assert_eq!(
8101 code_actions[0].lsp_action.action_kind(),
8102 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
8103 );
8104}
8105
8106#[gpui::test]
8107async fn test_code_actions_without_requested_kinds_do_not_send_only_filter(
8108 cx: &mut gpui::TestAppContext,
8109) {
8110 init_test(cx);
8111
8112 let fs = FakeFs::new(cx.executor());
8113 fs.insert_tree(
8114 path!("/dir"),
8115 json!({
8116 "a.ts": "a",
8117 }),
8118 )
8119 .await;
8120
8121 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8122
8123 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8124 language_registry.add(typescript_lang());
8125 let mut fake_language_servers = language_registry.register_fake_lsp(
8126 "TypeScript",
8127 FakeLspAdapter {
8128 capabilities: lsp::ServerCapabilities {
8129 code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
8130 lsp::CodeActionOptions {
8131 code_action_kinds: Some(vec![
8132 CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
8133 "source.doc".into(),
8134 ]),
8135 ..lsp::CodeActionOptions::default()
8136 },
8137 )),
8138 ..lsp::ServerCapabilities::default()
8139 },
8140 ..FakeLspAdapter::default()
8141 },
8142 );
8143
8144 let (buffer, _handle) = project
8145 .update(cx, |p, cx| {
8146 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
8147 })
8148 .await
8149 .unwrap();
8150 cx.executor().run_until_parked();
8151
8152 let fake_server = fake_language_servers
8153 .next()
8154 .await
8155 .expect("failed to get the language server");
8156
8157 let mut request_handled = fake_server.set_request_handler::<
8158 lsp::request::CodeActionRequest,
8159 _,
8160 _,
8161 >(move |params, _| async move {
8162 assert_eq!(
8163 params.context.only, None,
8164 "Code action requests without explicit kind filters should not send `context.only`"
8165 );
8166 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8167 lsp::CodeAction {
8168 title: "Add test".to_string(),
8169 kind: Some("source.addTest".into()),
8170 ..lsp::CodeAction::default()
8171 },
8172 )]))
8173 });
8174
8175 let code_actions_task = project.update(cx, |project, cx| {
8176 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8177 });
8178
8179 let () = request_handled
8180 .next()
8181 .await
8182 .expect("The code action request should have been triggered");
8183
8184 let code_actions = code_actions_task.await.unwrap().unwrap();
8185 assert_eq!(code_actions.len(), 1);
8186 assert_eq!(
8187 code_actions[0].lsp_action.action_kind(),
8188 Some("source.addTest".into())
8189 );
8190}
8191
8192#[gpui::test]
8193async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
8194 init_test(cx);
8195
8196 let fs = FakeFs::new(cx.executor());
8197 fs.insert_tree(
8198 path!("/dir"),
8199 json!({
8200 "a.tsx": "a",
8201 }),
8202 )
8203 .await;
8204
8205 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
8206
8207 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
8208 language_registry.add(tsx_lang());
8209 let language_server_names = [
8210 "TypeScriptServer",
8211 "TailwindServer",
8212 "ESLintServer",
8213 "NoActionsCapabilitiesServer",
8214 ];
8215
8216 let mut language_server_rxs = [
8217 language_registry.register_fake_lsp(
8218 "tsx",
8219 FakeLspAdapter {
8220 name: language_server_names[0],
8221 capabilities: lsp::ServerCapabilities {
8222 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8223 ..lsp::ServerCapabilities::default()
8224 },
8225 ..FakeLspAdapter::default()
8226 },
8227 ),
8228 language_registry.register_fake_lsp(
8229 "tsx",
8230 FakeLspAdapter {
8231 name: language_server_names[1],
8232 capabilities: lsp::ServerCapabilities {
8233 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8234 ..lsp::ServerCapabilities::default()
8235 },
8236 ..FakeLspAdapter::default()
8237 },
8238 ),
8239 language_registry.register_fake_lsp(
8240 "tsx",
8241 FakeLspAdapter {
8242 name: language_server_names[2],
8243 capabilities: lsp::ServerCapabilities {
8244 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
8245 ..lsp::ServerCapabilities::default()
8246 },
8247 ..FakeLspAdapter::default()
8248 },
8249 ),
8250 language_registry.register_fake_lsp(
8251 "tsx",
8252 FakeLspAdapter {
8253 name: language_server_names[3],
8254 capabilities: lsp::ServerCapabilities {
8255 code_action_provider: None,
8256 ..lsp::ServerCapabilities::default()
8257 },
8258 ..FakeLspAdapter::default()
8259 },
8260 ),
8261 ];
8262
8263 let (buffer, _handle) = project
8264 .update(cx, |p, cx| {
8265 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
8266 })
8267 .await
8268 .unwrap();
8269 cx.executor().run_until_parked();
8270
8271 let mut servers_with_actions_requests = HashMap::default();
8272 for i in 0..language_server_names.len() {
8273 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
8274 panic!(
8275 "Failed to get language server #{i} with name {}",
8276 &language_server_names[i]
8277 )
8278 });
8279 let new_server_name = new_server.server.name();
8280
8281 assert!(
8282 !servers_with_actions_requests.contains_key(&new_server_name),
8283 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
8284 );
8285 match new_server_name.0.as_ref() {
8286 "TailwindServer" | "TypeScriptServer" => {
8287 servers_with_actions_requests.insert(
8288 new_server_name.clone(),
8289 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8290 move |_, _| {
8291 let name = new_server_name.clone();
8292 async move {
8293 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
8294 lsp::CodeAction {
8295 title: format!("{name} code action"),
8296 ..lsp::CodeAction::default()
8297 },
8298 )]))
8299 }
8300 },
8301 ),
8302 );
8303 }
8304 "ESLintServer" => {
8305 servers_with_actions_requests.insert(
8306 new_server_name,
8307 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
8308 |_, _| async move { Ok(None) },
8309 ),
8310 );
8311 }
8312 "NoActionsCapabilitiesServer" => {
8313 let _never_handled = new_server
8314 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
8315 panic!(
8316 "Should not call for code actions server with no corresponding capabilities"
8317 )
8318 });
8319 }
8320 unexpected => panic!("Unexpected server name: {unexpected}"),
8321 }
8322 }
8323
8324 let code_actions_task = project.update(cx, |project, cx| {
8325 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
8326 });
8327
8328 // cx.run_until_parked();
8329 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
8330 |mut code_actions_request| async move {
8331 code_actions_request
8332 .next()
8333 .await
8334 .expect("All code actions requests should have been triggered")
8335 },
8336 ))
8337 .await;
8338 assert_eq!(
8339 vec!["TailwindServer code action", "TypeScriptServer code action"],
8340 code_actions_task
8341 .await
8342 .unwrap()
8343 .unwrap()
8344 .into_iter()
8345 .map(|code_action| code_action.lsp_action.title().to_owned())
8346 .sorted()
8347 .collect::<Vec<_>>(),
8348 "Should receive code actions responses from all related servers with hover capabilities"
8349 );
8350}
8351
8352#[gpui::test]
8353async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
8354 init_test(cx);
8355
8356 let fs = FakeFs::new(cx.executor());
8357 fs.insert_tree(
8358 "/dir",
8359 json!({
8360 "a.rs": "let a = 1;",
8361 "b.rs": "let b = 2;",
8362 "c.rs": "let c = 2;",
8363 }),
8364 )
8365 .await;
8366
8367 let project = Project::test(
8368 fs,
8369 [
8370 "/dir/a.rs".as_ref(),
8371 "/dir/b.rs".as_ref(),
8372 "/dir/c.rs".as_ref(),
8373 ],
8374 cx,
8375 )
8376 .await;
8377
8378 // check the initial state and get the worktrees
8379 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
8380 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8381 assert_eq!(worktrees.len(), 3);
8382
8383 let worktree_a = worktrees[0].read(cx);
8384 let worktree_b = worktrees[1].read(cx);
8385 let worktree_c = worktrees[2].read(cx);
8386
8387 // check they start in the right order
8388 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
8389 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
8390 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
8391
8392 (
8393 worktrees[0].clone(),
8394 worktrees[1].clone(),
8395 worktrees[2].clone(),
8396 )
8397 });
8398
8399 // move first worktree to after the second
8400 // [a, b, c] -> [b, a, c]
8401 project
8402 .update(cx, |project, cx| {
8403 let first = worktree_a.read(cx);
8404 let second = worktree_b.read(cx);
8405 project.move_worktree(first.id(), second.id(), cx)
8406 })
8407 .expect("moving first after second");
8408
8409 // check the state after moving
8410 project.update(cx, |project, cx| {
8411 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8412 assert_eq!(worktrees.len(), 3);
8413
8414 let first = worktrees[0].read(cx);
8415 let second = worktrees[1].read(cx);
8416 let third = worktrees[2].read(cx);
8417
8418 // check they are now in the right order
8419 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8420 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
8421 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8422 });
8423
8424 // move the second worktree to before the first
8425 // [b, a, c] -> [a, b, c]
8426 project
8427 .update(cx, |project, cx| {
8428 let second = worktree_a.read(cx);
8429 let first = worktree_b.read(cx);
8430 project.move_worktree(first.id(), second.id(), cx)
8431 })
8432 .expect("moving second before first");
8433
8434 // check the state after moving
8435 project.update(cx, |project, cx| {
8436 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8437 assert_eq!(worktrees.len(), 3);
8438
8439 let first = worktrees[0].read(cx);
8440 let second = worktrees[1].read(cx);
8441 let third = worktrees[2].read(cx);
8442
8443 // check they are now in the right order
8444 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8445 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8446 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8447 });
8448
8449 // move the second worktree to after the third
8450 // [a, b, c] -> [a, c, b]
8451 project
8452 .update(cx, |project, cx| {
8453 let second = worktree_b.read(cx);
8454 let third = worktree_c.read(cx);
8455 project.move_worktree(second.id(), third.id(), cx)
8456 })
8457 .expect("moving second after third");
8458
8459 // check the state after moving
8460 project.update(cx, |project, cx| {
8461 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8462 assert_eq!(worktrees.len(), 3);
8463
8464 let first = worktrees[0].read(cx);
8465 let second = worktrees[1].read(cx);
8466 let third = worktrees[2].read(cx);
8467
8468 // check they are now in the right order
8469 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8470 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8471 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
8472 });
8473
8474 // move the third worktree to before the second
8475 // [a, c, b] -> [a, b, c]
8476 project
8477 .update(cx, |project, cx| {
8478 let third = worktree_c.read(cx);
8479 let second = worktree_b.read(cx);
8480 project.move_worktree(third.id(), second.id(), cx)
8481 })
8482 .expect("moving third before second");
8483
8484 // check the state after moving
8485 project.update(cx, |project, cx| {
8486 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8487 assert_eq!(worktrees.len(), 3);
8488
8489 let first = worktrees[0].read(cx);
8490 let second = worktrees[1].read(cx);
8491 let third = worktrees[2].read(cx);
8492
8493 // check they are now in the right order
8494 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8495 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8496 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8497 });
8498
8499 // move the first worktree to after the third
8500 // [a, b, c] -> [b, c, a]
8501 project
8502 .update(cx, |project, cx| {
8503 let first = worktree_a.read(cx);
8504 let third = worktree_c.read(cx);
8505 project.move_worktree(first.id(), third.id(), cx)
8506 })
8507 .expect("moving first after third");
8508
8509 // check the state after moving
8510 project.update(cx, |project, cx| {
8511 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8512 assert_eq!(worktrees.len(), 3);
8513
8514 let first = worktrees[0].read(cx);
8515 let second = worktrees[1].read(cx);
8516 let third = worktrees[2].read(cx);
8517
8518 // check they are now in the right order
8519 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8520 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8521 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
8522 });
8523
8524 // move the third worktree to before the first
8525 // [b, c, a] -> [a, b, c]
8526 project
8527 .update(cx, |project, cx| {
8528 let third = worktree_a.read(cx);
8529 let first = worktree_b.read(cx);
8530 project.move_worktree(third.id(), first.id(), cx)
8531 })
8532 .expect("moving third before first");
8533
8534 // check the state after moving
8535 project.update(cx, |project, cx| {
8536 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8537 assert_eq!(worktrees.len(), 3);
8538
8539 let first = worktrees[0].read(cx);
8540 let second = worktrees[1].read(cx);
8541 let third = worktrees[2].read(cx);
8542
8543 // check they are now in the right order
8544 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8545 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8546 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8547 });
8548}
8549
// Verifies that an unstaged diff (buffer vs. git index) tracks updates to the
// index text made through the fake filesystem.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index (staged) version: no comment line, "hello world".
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: adds a leading comment and changes the string.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // Seed the fake repository's index with the staged contents.
    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    // Buffer vs. index: one added comment line and one modified line.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Restage: the new index version keeps the comment but drops the println.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    cx.run_until_parked();
    // The diff recomputes against the new index text: only the println line
    // now differs, and it shows as an addition.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff
                .snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
8643
// Verifies uncommitted diffs (buffer vs. HEAD), including the secondary
// (staged/unstaged) status of each hunk and the handling of deleted files.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD version of the file.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index version: the println change is already staged.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working-copy version: adds a comment on top of the staged change.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index, but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // Against HEAD: the added comment still has a secondary (unstaged) hunk,
    // while the println modification is fully staged.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a deletion hunk; the deletion is not yet staged
    // (the file still exists in the index).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // With the file gone from the index, the deletion hunk no longer has a
    // secondary (unstaged) counterpart.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8827
// Verifies staging and unstaging individual hunks: the optimistic
// "pending" secondary status, rollback when the git index write fails,
// and batching of multiple staging operations.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD contents; the working copy deletes "zero" and upcases "two"/"four",
    // producing one deletion hunk and two modification hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so every hunk begins unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so the test can assert on their ordering.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // Before the async index write completes, the hunk is marked
        // `SecondaryHunkRemovalPending` rather than fully staged.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The new hunk is optimistically pending even though the write will fail.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback is announced as a diff change covering the whole file.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
9177
/// Regression test (fixed seeds): staging hunks one after another must stay
/// consistent when the filesystem events produced by git index writes arrive
/// only after further staging operations have already been issued.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD has six lines; the working copy deletes "zero" and upper-cases
    // "two" and "four", yielding three separate hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // Both HEAD and the index start at the committed contents, so every hunk
    // begins unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // The index write hasn't been observed yet, so the first hunk only
        // shows a pending removal of its secondary (unstaged) hunk.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks are now pending; the third is untouched.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
9371
/// Fuzz test: randomly stages and unstages hunks (yielding a random number of
/// times between operations) and verifies that once all index writes settle,
/// each hunk's secondary status matches the last operation applied to it.
/// The operation count can be overridden via the `OPERATIONS` environment
/// variable (default 20).
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line is modified in the buffer, producing six hunks
    // (asserted below).
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of each hunk's expected secondary status.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle: stage hunks that still have a secondary (unstaged) hunk,
        // unstage the rest, recording the expected pending state.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times so index writes and events
        // interleave differently across iterations.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending transition should have resolved.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
9491
9492#[gpui::test]
9493async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
9494 init_test(cx);
9495
9496 let committed_contents = r#"
9497 fn main() {
9498 println!("hello from HEAD");
9499 }
9500 "#
9501 .unindent();
9502 let file_contents = r#"
9503 fn main() {
9504 println!("hello from the working copy");
9505 }
9506 "#
9507 .unindent();
9508
9509 let fs = FakeFs::new(cx.background_executor.clone());
9510 fs.insert_tree(
9511 "/dir",
9512 json!({
9513 ".git": {},
9514 "src": {
9515 "main.rs": file_contents,
9516 }
9517 }),
9518 )
9519 .await;
9520
9521 fs.set_head_for_repo(
9522 Path::new("/dir/.git"),
9523 &[("src/main.rs", committed_contents.clone())],
9524 "deadbeef",
9525 );
9526 fs.set_index_for_repo(
9527 Path::new("/dir/.git"),
9528 &[("src/main.rs", committed_contents.clone())],
9529 );
9530
9531 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
9532
9533 let buffer = project
9534 .update(cx, |project, cx| {
9535 project.open_local_buffer("/dir/src/main.rs", cx)
9536 })
9537 .await
9538 .unwrap();
9539 let uncommitted_diff = project
9540 .update(cx, |project, cx| {
9541 project.open_uncommitted_diff(buffer.clone(), cx)
9542 })
9543 .await
9544 .unwrap();
9545
9546 cx.run_until_parked();
9547 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
9548 let snapshot = buffer.read(cx).snapshot();
9549 assert_hunks(
9550 uncommitted_diff.snapshot(cx).hunks(&snapshot),
9551 &snapshot,
9552 &uncommitted_diff.base_text_string(cx).unwrap(),
9553 &[(
9554 1..2,
9555 " println!(\"hello from HEAD\");\n",
9556 " println!(\"hello from the working copy\");\n",
9557 DiffHunkStatus {
9558 kind: DiffHunkStatusKind::Modified,
9559 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
9560 },
9561 )],
9562 );
9563 });
9564}
9565
// TODO: Should we test this on Windows also?
/// Verifies that staging a hunk does not clobber the file's executable bit:
/// a file committed with mode 755 must still appear as 100755 in the index
/// after staging through the diff API. Uses the real filesystem and git.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Real FS and a real git subprocess are involved, so parking is allowed.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    // Commit the file with the executable bit set (mode 755).
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    // Modify the working copy so there's a hunk to stage.
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk in the file.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // Ask git itself whether the staged change altered the file mode.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check via the index listing that the mode is still 100755.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
9650
/// Checks `repository_and_path_for_project_path` with nested repositories:
/// paths outside any repository resolve to `None`, paths under the nested
/// `dep1` repository resolve to that innermost repo, and deleting a `.git`
/// directory drops the association for paths it used to cover.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project-relative path, expected (repo work dir, repo-relative path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing dir1's `.git` should leave its files with no repository.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
9740
9741#[gpui::test]
9742async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
9743 init_test(cx);
9744 let fs = FakeFs::new(cx.background_executor.clone());
9745 let home = paths::home_dir();
9746 fs.insert_tree(
9747 home,
9748 json!({
9749 ".git": {},
9750 "project": {
9751 "a.txt": "A"
9752 },
9753 }),
9754 )
9755 .await;
9756
9757 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
9758 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9759 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9760
9761 project
9762 .update(cx, |project, cx| project.git_scans_complete(cx))
9763 .await;
9764 tree.flush_fs_events(cx).await;
9765
9766 project.read_with(cx, |project, cx| {
9767 let containing = project
9768 .git_store()
9769 .read(cx)
9770 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
9771 assert!(containing.is_none());
9772 });
9773
9774 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
9775 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9776 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9777 project
9778 .update(cx, |project, cx| project.git_scans_complete(cx))
9779 .await;
9780 tree.flush_fs_events(cx).await;
9781
9782 project.read_with(cx, |project, cx| {
9783 let containing = project
9784 .git_store()
9785 .read(cx)
9786 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
9787 assert_eq!(
9788 containing
9789 .unwrap()
9790 .0
9791 .read(cx)
9792 .work_directory_abs_path
9793 .as_ref(),
9794 home,
9795 );
9796 });
9797}
9798
/// Exercises cached git status reporting against a real repository through a
/// sequence of changes: the initial modified/untracked/unchanged/deleted
/// states, a new working-copy edit, a commit that cleans the index, and
/// finally deletion of one tracked and one untracked file.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // After the commit: delete d.txt and modify a.txt; b.txt stays untracked.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Modify a previously unchanged file; it should join the status list.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Commit everything outstanding, which should clear these statuses.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
                diff_stat: Some(DiffStat {
                    added: 0,
                    deleted: 1,
                }),
            }]
        );
    });
}
9954
/// Checks git status postprocessing: a nested repository's work directory is
/// excluded from the outer repository's statuses, and a file that exists in
/// HEAD and the working copy but was removed from the index is reported with
/// a combined deleted-in-index / added-in-worktree (`DA`) status.
/// Currently `#[ignore]`d.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer repository (work dir ending in "project"), not `sub`.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
                diff_stat: None,
            }]
        )
    });
}
10019
10020#[track_caller]
10021/// We merge lhs into rhs.
10022fn merge_pending_ops_snapshots(
10023 source: Vec<pending_op::PendingOps>,
10024 mut target: Vec<pending_op::PendingOps>,
10025) -> Vec<pending_op::PendingOps> {
10026 for s_ops in source {
10027 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
10028 if ops.repo_path == s_ops.repo_path {
10029 Some(idx)
10030 } else {
10031 None
10032 }
10033 }) {
10034 let t_ops = &mut target[idx];
10035 for s_op in s_ops.ops {
10036 if let Some(op_idx) = t_ops
10037 .ops
10038 .iter()
10039 .zip(0..)
10040 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
10041 {
10042 let t_op = &mut t_ops.ops[op_idx];
10043 match (s_op.job_status, t_op.job_status) {
10044 (pending_op::JobStatus::Running, _) => {}
10045 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
10046 (s_st, t_st) if s_st == t_st => {}
10047 _ => unreachable!(),
10048 }
10049 } else {
10050 t_ops.ops.push(s_op);
10051 }
10052 }
10053 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
10054 } else {
10055 target.push(s_ops);
10056 }
10057 }
10058 target
10059}
10060
/// Verifies pending-op bookkeeping for alternating stage/unstage operations
/// on one untracked file: each operation first appears as `Running` with a
/// sequentially assigned id, transitions to `Finished` once its task
/// completes, and the accumulated event history plus the final cached status
/// both reflect the full sequence.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into one merged history.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Expected id of the next op; ids are assigned sequentially from 1.
    let mut id = 1u16;

    // Runs one stage/unstage, asserting the op is Running while in flight
    // and Finished once the task resolves.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event history records all five ops, in order, all Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The last operation was a stage, so the file ends up added to the index.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10225
/// When the same path is staged twice while the first operation is still in
/// flight, the superseded first op is recorded as `Skipped` and only the
/// second runs to `Finished`; the file still ends up staged.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot into one merged history.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage: detached, left in flight.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second stage of the same path: awaited (with a timeout as a guard).
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was superseded (Skipped); op 2 completed (Finished).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file is staged exactly once, showing as added in the index.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
10335
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    // A repo containing two untracked files, so stage_all/unstage_all act on
    // multiple paths at once.
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged snapshot emitted by the git store into
    // one merged tree, so the complete per-path op history can be asserted later.
    // The subscription is installed before any git operations run so no event is
    // missed.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage one file explicitly, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt records exactly one finished stage op and one finished unstage op;
    // op ids appear to be per-path, and the stage_all over the already-staged
    // file seemingly adds no extra op for it — NOTE(review): confirm against
    // the pending_op implementation.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt was only touched by stage_all/unstage_all and shows the same pair.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After the stage/unstage round-trip, both files are untracked again.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
            ]
        );
    });
}
10466
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths (relative to /root/my-repo, not to the opened worktree).
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open a worktree rooted at a subfolder of the repository's work directory.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The work directory is the repo root, even though the opened worktree
        // only covers a subfolder of it.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clearing the simulated status should be reflected after the next scan.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
10546
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// `#[cfg(any())]` is never satisfied, so this test is currently compiled out entirely.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real on-disk git repository, so blocking in the executor is allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a conflicting cherry-pick: change a.txt to "A" on a branch, to "b"
    // on main, then cherry-pick the branch commit onto main.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git actually entered the conflicted cherry-pick state.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository should now report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick is resolved, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
10629
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Initially: a.xml is tracked and committed; b.txt matches the ignore rule.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    // (Per its call sites, the last argument of assert_entry_git_state is the
    // expected ignored flag — TODO confirm against its definition.)
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    // The ignored flags should flip, and b.txt should now show as added.
    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
10697
// NOTE:
// This test always fails on Windows because, unlike on Unix, you can't rename
// a directory that some program already has open. This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real on-disk git repository, so blocking in the executor is allowed.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // "a" is committed and then modified; "b" is never added, so it stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the repository's work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should follow the rename: new work directory, same statuses.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10779
// NOTE: This test always fails on Windows because, unlike on Unix, you can't
// rename a directory that some program already has open. This is a limitation
// of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real on-disk git repository, so blocking in the executor is allowed.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Paths relative to the repository's work directory ("project").
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added to the index, so both are untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // Committed files no longer have a status entry.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files, and extend the gitignore to also cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // `renamed_dir_name` is deliberately mutable: it is rebound below after the
    // directory is renamed on disk.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new untracked file inside a nested directory.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the parent directory; the untracked status should follow the file
    // to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
11004
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real on-disk git repository, so blocking in the executor is allowed.
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record repository update events and worktree entry updates so that we can
    // assert exactly which events the initial scan and later FS churn produce.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel entry is test-infrastructure noise, not a
                        // real change.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Open an ignored file so its ancestor directories get loaded despite
    // matching the ignore rule.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![RepositoryEvent::StatusesChanged,],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Churn inside the ignored directory: create a nested dir, write a temp
    // file, then remove the whole dir again — mimicking a build tool at work.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
11163
11164// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
11165// to different timings/ordering of events.
11166#[ignore]
11167#[gpui::test]
11168async fn test_odd_events_for_ignored_dirs(
11169 executor: BackgroundExecutor,
11170 cx: &mut gpui::TestAppContext,
11171) {
11172 init_test(cx);
11173 let fs = FakeFs::new(executor);
11174 fs.insert_tree(
11175 path!("/root"),
11176 json!({
11177 ".git": {},
11178 ".gitignore": "**/target/",
11179 "src": {
11180 "main.rs": "fn main() {}",
11181 },
11182 "target": {
11183 "debug": {
11184 "foo.txt": "foo",
11185 "deps": {}
11186 }
11187 }
11188 }),
11189 )
11190 .await;
11191 fs.set_head_and_index_for_repo(
11192 path!("/root/.git").as_ref(),
11193 &[
11194 (".gitignore", "**/target/".into()),
11195 ("src/main.rs", "fn main() {}".into()),
11196 ],
11197 );
11198
11199 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11200 let repository_updates = Arc::new(Mutex::new(Vec::new()));
11201 let project_events = Arc::new(Mutex::new(Vec::new()));
11202 project.update(cx, |project, cx| {
11203 let repository_updates = repository_updates.clone();
11204 cx.subscribe(project.git_store(), move |_, _, e, _| {
11205 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
11206 repository_updates.lock().push(e.clone());
11207 }
11208 })
11209 .detach();
11210 let project_events = project_events.clone();
11211 cx.subscribe_self(move |_, e, _| {
11212 if let Event::WorktreeUpdatedEntries(_, updates) = e {
11213 project_events.lock().extend(
11214 updates
11215 .iter()
11216 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
11217 .filter(|(path, _)| path != "fs-event-sentinel"),
11218 );
11219 }
11220 })
11221 .detach();
11222 });
11223
11224 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
11225 tree.update(cx, |tree, cx| {
11226 tree.load_file(rel_path("target/debug/foo.txt"), cx)
11227 })
11228 .await
11229 .unwrap();
11230 tree.flush_fs_events(cx).await;
11231 project
11232 .update(cx, |project, cx| project.git_scans_complete(cx))
11233 .await;
11234 cx.run_until_parked();
11235 tree.update(cx, |tree, _| {
11236 assert_eq!(
11237 tree.entries(true, 0)
11238 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
11239 .collect::<Vec<_>>(),
11240 vec![
11241 (rel_path(""), false),
11242 (rel_path(".gitignore"), false),
11243 (rel_path("src"), false),
11244 (rel_path("src/main.rs"), false),
11245 (rel_path("target"), true),
11246 (rel_path("target/debug"), true),
11247 (rel_path("target/debug/deps"), true),
11248 (rel_path("target/debug/foo.txt"), true),
11249 ]
11250 );
11251 });
11252
11253 assert_eq!(
11254 repository_updates.lock().drain(..).collect::<Vec<_>>(),
11255 vec![
11256 RepositoryEvent::BranchChanged,
11257 RepositoryEvent::StatusesChanged,
11258 RepositoryEvent::StatusesChanged,
11259 ],
11260 "Initial worktree scan should produce a repo update event"
11261 );
11262 assert_eq!(
11263 project_events.lock().drain(..).collect::<Vec<_>>(),
11264 vec![
11265 ("target".to_string(), PathChange::Loaded),
11266 ("target/debug".to_string(), PathChange::Loaded),
11267 ("target/debug/deps".to_string(), PathChange::Loaded),
11268 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
11269 ],
11270 "All non-ignored entries and all opened firs should be getting a project event",
11271 );
11272
11273 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
11274 // This may happen multiple times during a single flycheck, but once is enough for testing.
11275 fs.emit_fs_event("/root/target/debug/deps", None);
11276 tree.flush_fs_events(cx).await;
11277 project
11278 .update(cx, |project, cx| project.git_scans_complete(cx))
11279 .await;
11280 cx.executor().run_until_parked();
11281
11282 assert_eq!(
11283 repository_updates
11284 .lock()
11285 .iter()
11286 .cloned()
11287 .collect::<Vec<_>>(),
11288 Vec::new(),
11289 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
11290 );
11291 assert_eq!(
11292 project_events.lock().as_slice(),
11293 Vec::new(),
11294 "No further project events should happen, as only ignored dirs received FS events",
11295 );
11296}
11297
// Verifies that repositories reachable only through *invisible* worktrees
// (e.g. a single-file worktree created on demand) do not get added to the
// project's repository list — only repos of visible worktrees appear.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // `dir1` is a repo containing a nested repo `dep1`; only `dep1` is opened
    // as a visible worktree below.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only the repo owning the visible worktree should be known.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Opening `b.txt` creates an invisible worktree inside the outer `dir1`
    // repository.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The outer repo (`/root/dir1`) must still not be listed.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
11359
// Verifies git status and `is_ignored` tracking across rescans: files ignored
// by an ancestor `.gitignore`, files in an ignored directory, and newly
// created tracked/ignored files after index updates.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file_scan_exclusions so the scanner sees everything (including
    // dotfiles) and only gitignore rules determine ignored state.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ignored directories are not scanned eagerly; force their entries to be
    // loaded so the assertions below can see them.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        // Tracked and unmodified => no status entry.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        // Ignored by the *ancestor* .gitignore outside the repo root: the repo
        // itself does not mark it ignored.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new tracked file and stage it, plus new ancestor-ignored and
    // ignored-dir files, then verify statuses after the rescan settles.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // Newly staged file shows as Added in the index.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
11500
// Verifies that linked git worktrees (gitdir file pointing into
// `.git/worktrees/...`) and submodules (gitdir file pointing into
// `.git/modules/...`) are each discovered as their own repository, and that
// git events in them refresh their status.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories (main, linked worktree, submodule) are expected.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repo, not the outer one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        // Barrier ensures pending repository jobs complete before asserting.
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
11656
// Verifies that two worktrees sharing the same containing git repository
// result in a single deduplicated repository entry.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Both worktrees live inside the same `/root/project` repository.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository should be reported despite two worktrees.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
11703
// Verifies that saving a buffer under a new path re-bases its unstaged and
// uncommitted diffs against the *new* path's staged/committed contents
// (driven by the `BufferChangedFilePath` event).
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct contents for each file/state so the assertions can tell which
    // base text the diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Replace the whole buffer so it differs from both staged contents.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11817
/// Runs `query` against `project` and collects the matches into a map from
/// each matched buffer's full path to its match ranges, converted to byte
/// offsets.
///
/// Note: for a given buffer only the *first* `SearchResult::Buffer` received
/// is kept (`or_insert`); `LimitReached` markers are ignored.
async fn search(
    project: &Entity<Project>,
    query: SearchQuery,
    cx: &mut gpui::TestAppContext,
) -> Result<HashMap<String, Vec<Range<usize>>>> {
    let search_rx = project.update(cx, |project, cx| project.search(query, cx));
    let mut results = HashMap::default();
    // Drain the result channel until the search finishes and the sender drops.
    while let Ok(search_result) = search_rx.rx.recv().await {
        match search_result {
            SearchResult::Buffer { buffer, ranges } => {
                results.entry(buffer).or_insert(ranges);
            }
            SearchResult::LimitReached => {}
        }
    }
    Ok(results
        .into_iter()
        .map(|(buffer, ranges)| {
            buffer.update(cx, |buffer, cx| {
                let path = buffer
                    .file()
                    .unwrap()
                    .full_path(cx)
                    .to_string_lossy()
                    .to_string();
                // Resolve anchor ranges to stable offsets for easy assertions.
                let ranges = ranges
                    .into_iter()
                    .map(|range| range.to_offset(buffer))
                    .collect::<Vec<_>>();
                (path, ranges)
            })
        })
        .collect())
}
11852
// Verifies that reloading a buffer with a different encoding is undoable and
// redoable, and that neither reload nor undo/redo marks the buffer dirty.
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    // 0x48 0x69 interpreted as one UTF-16LE code unit (0x6948).
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    // Undo restores both the text and the original encoding.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11916
// Verifies that `wait_for_initial_scan` resolves only after both worktrees are
// fully scanned, and that exactly one `Repository` entity is created per repo.
#[gpui::test]
async fn test_initial_scan_complete(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                ".zed": {
                    "tasks.json": r#"[{"label": "task-a", "command": "echo a"}]"#
                },
                "src": { "main.rs": "" }
            },
            "b": {
                ".git": {},
                ".zed": {
                    "tasks.json": r#"[{"label": "task-b", "command": "echo b"}]"#
                },
                "src": { "lib.rs": "" }
            },
        }),
    )
    .await;

    // Count every `Repository` entity constructed during the scan by
    // observing entity creation globally.
    let repos_created = Rc::new(RefCell::new(Vec::new()));
    let _observe = {
        let repos_created = repos_created.clone();
        cx.update(|cx| {
            cx.observe_new::<Repository>(move |repo, _, cx| {
                repos_created.borrow_mut().push(cx.entity().downgrade());
                let _ = repo;
            })
        })
    };

    let project = Project::test(
        fs.clone(),
        [path!("/root/a").as_ref(), path!("/root/b").as_ref()],
        cx,
    )
    .await;

    let scan_complete = project.read_with(cx, |project, cx| project.wait_for_initial_scan(cx));
    scan_complete.await;

    project.read_with(cx, |project, cx| {
        assert!(
            project.worktree_store().read(cx).initial_scan_completed(),
            "Expected initial scan to be completed after awaiting wait_for_initial_scan"
        );
    });

    let created_repos_len = repos_created.borrow().len();
    assert_eq!(
        created_repos_len, 2,
        "Expected 2 repositories to be created during scan, got {}",
        created_repos_len
    );

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repositories().len(),
            2,
            "Expected 2 repositories in GitStore"
        );
    });
}
11987
/// Shared setup for every test in this module: initializes test logging, a
/// test `SettingsStore` global, and the release channel `Project::test` needs.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(semver::Version::new(0, 0, 0), cx);
    });
}
11997
11998fn json_lang() -> Arc<Language> {
11999 Arc::new(Language::new(
12000 LanguageConfig {
12001 name: "JSON".into(),
12002 matcher: LanguageMatcher {
12003 path_suffixes: vec!["json".to_string()],
12004 ..Default::default()
12005 },
12006 ..Default::default()
12007 },
12008 None,
12009 ))
12010}
12011
12012fn js_lang() -> Arc<Language> {
12013 Arc::new(Language::new(
12014 LanguageConfig {
12015 name: "JavaScript".into(),
12016 matcher: LanguageMatcher {
12017 path_suffixes: vec!["js".to_string()],
12018 ..Default::default()
12019 },
12020 ..Default::default()
12021 },
12022 None,
12023 ))
12024}
12025
/// A fake Python language whose toolchain lister reports a "Python Venv"
/// toolchain for every `.venv` directory found in the ancestors of the
/// queried subroot (checked against the provided `FakeFs`).
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                // Only directories that actually exist in the fake FS count.
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is intentionally unimplemented — tests exercise `list` only.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for the fake toolchain.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
12098
12099fn typescript_lang() -> Arc<Language> {
12100 Arc::new(Language::new(
12101 LanguageConfig {
12102 name: "TypeScript".into(),
12103 matcher: LanguageMatcher {
12104 path_suffixes: vec!["ts".to_string()],
12105 ..Default::default()
12106 },
12107 ..Default::default()
12108 },
12109 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
12110 ))
12111}
12112
12113fn tsx_lang() -> Arc<Language> {
12114 Arc::new(Language::new(
12115 LanguageConfig {
12116 name: "tsx".into(),
12117 matcher: LanguageMatcher {
12118 path_suffixes: vec!["tsx".to_string()],
12119 ..Default::default()
12120 },
12121 ..Default::default()
12122 },
12123 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
12124 ))
12125}
12126
12127fn get_all_tasks(
12128 project: &Entity<Project>,
12129 task_contexts: Arc<TaskContexts>,
12130 cx: &mut App,
12131) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
12132 let new_tasks = project.update(cx, |project, cx| {
12133 project.task_store().update(cx, |task_store, cx| {
12134 task_store.task_inventory().unwrap().update(cx, |this, cx| {
12135 this.used_and_current_resolved_tasks(task_contexts, cx)
12136 })
12137 })
12138 });
12139
12140 cx.background_spawn(async move {
12141 let (mut old, new) = new_tasks.await;
12142 old.extend(new);
12143 old
12144 })
12145}
12146
12147#[track_caller]
12148fn assert_entry_git_state(
12149 tree: &Worktree,
12150 repository: &Repository,
12151 path: &str,
12152 index_status: Option<StatusCode>,
12153 is_ignored: bool,
12154) {
12155 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
12156 let entry = tree
12157 .entry_for_path(&rel_path(path))
12158 .unwrap_or_else(|| panic!("entry {path} not found"));
12159 let status = repository
12160 .status_for_path(&repo_path(path))
12161 .map(|entry| entry.status);
12162 let expected = index_status.map(|index_status| {
12163 TrackedStatus {
12164 index_status,
12165 worktree_status: StatusCode::Unmodified,
12166 }
12167 .into()
12168 });
12169 assert_eq!(
12170 status, expected,
12171 "expected {path} to have git status: {expected:?}"
12172 );
12173 assert_eq!(
12174 entry.is_ignored, is_ignored,
12175 "expected {path} to have is_ignored: {is_ignored}"
12176 );
12177}
12178
12179#[track_caller]
12180fn git_init(path: &Path) -> git2::Repository {
12181 let mut init_opts = RepositoryInitOptions::new();
12182 init_opts.initial_head("main");
12183 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
12184}
12185
12186#[track_caller]
12187fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
12188 let path = path.as_ref();
12189 let mut index = repo.index().expect("Failed to get index");
12190 index.add_path(path).expect("Failed to add file");
12191 index.write().expect("Failed to write index");
12192}
12193
12194#[track_caller]
12195fn git_remove_index(path: &Path, repo: &git2::Repository) {
12196 let mut index = repo.index().expect("Failed to get index");
12197 index.remove_path(path).expect("Failed to add file");
12198 index.write().expect("Failed to write index");
12199}
12200
12201#[track_caller]
12202fn git_commit(msg: &'static str, repo: &git2::Repository) {
12203 use git2::Signature;
12204
12205 let signature = Signature::now("test", "test@zed.dev").unwrap();
12206 let oid = repo.index().unwrap().write_tree().unwrap();
12207 let tree = repo.find_tree(oid).unwrap();
12208 if let Ok(head) = repo.head() {
12209 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
12210
12211 let parent_commit = parent_obj.as_commit().unwrap();
12212
12213 repo.commit(
12214 Some("HEAD"),
12215 &signature,
12216 &signature,
12217 msg,
12218 &tree,
12219 &[parent_commit],
12220 )
12221 .expect("Failed to commit with parent");
12222 } else {
12223 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
12224 .expect("Failed to commit");
12225 }
12226}
12227
// Currently unused: `cfg(any())` is never true, so this is compiled out.
/// Cherry-picks `commit` onto the current HEAD with default options.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
12233
12234#[track_caller]
12235fn git_stash(repo: &mut git2::Repository) {
12236 use git2::Signature;
12237
12238 let signature = Signature::now("test", "test@zed.dev").unwrap();
12239 repo.stash_save(&signature, "N/A", None)
12240 .expect("Failed to stash");
12241}
12242
12243#[track_caller]
12244fn git_reset(offset: usize, repo: &git2::Repository) {
12245 let head = repo.head().expect("Couldn't get repo head");
12246 let object = head.peel(git2::ObjectType::Commit).unwrap();
12247 let commit = object.as_commit().unwrap();
12248 let new_head = commit
12249 .parents()
12250 .inspect(|parnet| {
12251 parnet.message();
12252 })
12253 .nth(offset)
12254 .expect("Not enough history");
12255 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
12256 .expect("Could not reset");
12257}
12258
// Currently unused: `cfg(any())` is never true, so this is compiled out.
/// Creates branch `name` pointing at the current HEAD commit. Does not
/// overwrite an existing branch of the same name (`force = false`).
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Message fixed: this previously claimed "Failed to commit".
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
12269
// Currently unused: `cfg(any())` is never true, so this is compiled out.
/// Points HEAD at the ref `name` and checks out its tree into the worktree.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
12276
// Currently unused: `cfg(any())` is never true, so this is compiled out.
/// Returns the repository's current status as a map from path to `git2`
/// status flags.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    let mut result = collections::HashMap::default();
    for entry in statuses.iter() {
        result.insert(entry.path().unwrap().to_string(), entry.status());
    }
    result
}
12286
// Verifies `find_project_path` resolution of absolute paths against multiple
// worktrees, including nonexistent files inside a worktree (resolvable) and
// paths outside every worktree (not resolvable).
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Absolute path to an existing file resolves to the owning worktree
        // plus the worktree-relative path.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // Paths inside a worktree resolve even if the file doesn't exist yet
        // (e.g. for save-as destinations).
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
12370
// Verifies repository bookkeeping as worktrees are removed: a repo shared by
// two worktrees survives removal of one of them, and the active repository
// falls back to another repo (or none) as its worktrees disappear.
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // `/root/b/script` and `/root/b` are separate worktrees but share the
    // `/root/b` repository.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Three worktrees, but only two distinct repositories.
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing `/root/b/script` must not drop the shared `/root/b` repo,
    // since the `/root/b` worktree still references it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing the active repo's worktree should switch the active repo.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // With all worktrees gone there is no active repository left.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
12483
/// Verifies the optimistic ("pending") staging state machine for diff hunks:
/// staging a file should first flip its hunk to `SecondaryHunkRemovalPending`
/// *before* the underlying git operation finishes, settle at `NoSecondaryHunk`
/// once staging completes, and leave no hunks at all after the change is
/// committed to HEAD.
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD has lowercase "two"; the working copy uppercases it, producing
    // exactly one modified hunk covering row 1 (0-based) of the buffer.
    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // Both HEAD and the index hold the committed text, so the working-copy
    // edit starts out entirely unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file. The task is held (not awaited yet) so we can observe
    // the intermediate optimistic state while it is in flight.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    // Loop semantics: keep ticking while the hunk is still fully unstaged,
    // stop as soon as the optimistic pending state appears, and fail hard if
    // the hunk jumps straight to staged (i.e. no optimistic phase occurred).
    // If 10 ticks elapse without reaching the pending state, the assertion
    // below catches it.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // Mid-flight: the hunk must be in the optimistic pending state.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
12628
12629#[gpui::test]
12630async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
12631 init_test(cx);
12632
12633 // Configure read_only_files setting
12634 cx.update(|cx| {
12635 cx.update_global::<SettingsStore, _>(|store, cx| {
12636 store.update_user_settings(cx, |settings| {
12637 settings.project.worktree.read_only_files = Some(vec![
12638 "**/generated/**".to_string(),
12639 "**/*.gen.rs".to_string(),
12640 ]);
12641 });
12642 });
12643 });
12644
12645 let fs = FakeFs::new(cx.background_executor.clone());
12646 fs.insert_tree(
12647 path!("/root"),
12648 json!({
12649 "src": {
12650 "main.rs": "fn main() {}",
12651 "types.gen.rs": "// Generated file",
12652 },
12653 "generated": {
12654 "schema.rs": "// Auto-generated schema",
12655 }
12656 }),
12657 )
12658 .await;
12659
12660 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12661
12662 // Open a regular file - should be read-write
12663 let regular_buffer = project
12664 .update(cx, |project, cx| {
12665 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12666 })
12667 .await
12668 .unwrap();
12669
12670 regular_buffer.read_with(cx, |buffer, _| {
12671 assert!(!buffer.read_only(), "Regular file should not be read-only");
12672 });
12673
12674 // Open a file matching *.gen.rs pattern - should be read-only
12675 let gen_buffer = project
12676 .update(cx, |project, cx| {
12677 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
12678 })
12679 .await
12680 .unwrap();
12681
12682 gen_buffer.read_with(cx, |buffer, _| {
12683 assert!(
12684 buffer.read_only(),
12685 "File matching *.gen.rs pattern should be read-only"
12686 );
12687 });
12688
12689 // Open a file in generated directory - should be read-only
12690 let generated_buffer = project
12691 .update(cx, |project, cx| {
12692 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12693 })
12694 .await
12695 .unwrap();
12696
12697 generated_buffer.read_with(cx, |buffer, _| {
12698 assert!(
12699 buffer.read_only(),
12700 "File in generated directory should be read-only"
12701 );
12702 });
12703}
12704
12705#[gpui::test]
12706async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
12707 init_test(cx);
12708
12709 // Explicitly set read_only_files to empty (default behavior)
12710 cx.update(|cx| {
12711 cx.update_global::<SettingsStore, _>(|store, cx| {
12712 store.update_user_settings(cx, |settings| {
12713 settings.project.worktree.read_only_files = Some(vec![]);
12714 });
12715 });
12716 });
12717
12718 let fs = FakeFs::new(cx.background_executor.clone());
12719 fs.insert_tree(
12720 path!("/root"),
12721 json!({
12722 "src": {
12723 "main.rs": "fn main() {}",
12724 },
12725 "generated": {
12726 "schema.rs": "// Auto-generated schema",
12727 }
12728 }),
12729 )
12730 .await;
12731
12732 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12733
12734 // All files should be read-write when read_only_files is empty
12735 let main_buffer = project
12736 .update(cx, |project, cx| {
12737 project.open_local_buffer(path!("/root/src/main.rs"), cx)
12738 })
12739 .await
12740 .unwrap();
12741
12742 main_buffer.read_with(cx, |buffer, _| {
12743 assert!(
12744 !buffer.read_only(),
12745 "Files should not be read-only when read_only_files is empty"
12746 );
12747 });
12748
12749 let generated_buffer = project
12750 .update(cx, |project, cx| {
12751 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
12752 })
12753 .await
12754 .unwrap();
12755
12756 generated_buffer.read_with(cx, |buffer, _| {
12757 assert!(
12758 !buffer.read_only(),
12759 "Generated files should not be read-only when read_only_files is empty"
12760 );
12761 });
12762}
12763
12764#[gpui::test]
12765async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
12766 init_test(cx);
12767
12768 // Configure to make lock files read-only
12769 cx.update(|cx| {
12770 cx.update_global::<SettingsStore, _>(|store, cx| {
12771 store.update_user_settings(cx, |settings| {
12772 settings.project.worktree.read_only_files = Some(vec![
12773 "**/*.lock".to_string(),
12774 "**/package-lock.json".to_string(),
12775 ]);
12776 });
12777 });
12778 });
12779
12780 let fs = FakeFs::new(cx.background_executor.clone());
12781 fs.insert_tree(
12782 path!("/root"),
12783 json!({
12784 "Cargo.lock": "# Lock file",
12785 "Cargo.toml": "[package]",
12786 "package-lock.json": "{}",
12787 "package.json": "{}",
12788 }),
12789 )
12790 .await;
12791
12792 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
12793
12794 // Cargo.lock should be read-only
12795 let cargo_lock = project
12796 .update(cx, |project, cx| {
12797 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
12798 })
12799 .await
12800 .unwrap();
12801
12802 cargo_lock.read_with(cx, |buffer, _| {
12803 assert!(buffer.read_only(), "Cargo.lock should be read-only");
12804 });
12805
12806 // Cargo.toml should be read-write
12807 let cargo_toml = project
12808 .update(cx, |project, cx| {
12809 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
12810 })
12811 .await
12812 .unwrap();
12813
12814 cargo_toml.read_with(cx, |buffer, _| {
12815 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
12816 });
12817
12818 // package-lock.json should be read-only
12819 let package_lock = project
12820 .update(cx, |project, cx| {
12821 project.open_local_buffer(path!("/root/package-lock.json"), cx)
12822 })
12823 .await
12824 .unwrap();
12825
12826 package_lock.read_with(cx, |buffer, _| {
12827 assert!(buffer.read_only(), "package-lock.json should be read-only");
12828 });
12829
12830 // package.json should be read-write
12831 let package_json = project
12832 .update(cx, |project, cx| {
12833 project.open_local_buffer(path!("/root/package.json"), cx)
12834 })
12835 .await
12836 .unwrap();
12837
12838 package_json.read_with(cx, |buffer, _| {
12839 assert!(!buffer.read_only(), "package.json should not be read-only");
12840 });
12841}
12842
12843mod disable_ai_settings_tests {
12844 use gpui::TestAppContext;
12845 use project::*;
12846 use settings::{Settings, SettingsStore};
12847
12848 #[gpui::test]
12849 async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
12850 cx.update(|cx| {
12851 settings::init(cx);
12852
12853 // Test 1: Default is false (AI enabled)
12854 assert!(
12855 !DisableAiSettings::get_global(cx).disable_ai,
12856 "Default should allow AI"
12857 );
12858 });
12859
12860 let disable_true = serde_json::json!({
12861 "disable_ai": true
12862 })
12863 .to_string();
12864 let disable_false = serde_json::json!({
12865 "disable_ai": false
12866 })
12867 .to_string();
12868
12869 cx.update_global::<SettingsStore, _>(|store, cx| {
12870 store.set_user_settings(&disable_false, cx).unwrap();
12871 store.set_global_settings(&disable_true, cx).unwrap();
12872 });
12873 cx.update(|cx| {
12874 assert!(
12875 DisableAiSettings::get_global(cx).disable_ai,
12876 "Local false cannot override global true"
12877 );
12878 });
12879
12880 cx.update_global::<SettingsStore, _>(|store, cx| {
12881 store.set_global_settings(&disable_false, cx).unwrap();
12882 store.set_user_settings(&disable_true, cx).unwrap();
12883 });
12884
12885 cx.update(|cx| {
12886 assert!(
12887 DisableAiSettings::get_global(cx).disable_ai,
12888 "Local false cannot override global true"
12889 );
12890 });
12891 }
12892
12893 #[gpui::test]
12894 async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
12895 use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
12896 use worktree::WorktreeId;
12897
12898 cx.update(|cx| {
12899 settings::init(cx);
12900
12901 // Default should allow AI
12902 assert!(
12903 !DisableAiSettings::get_global(cx).disable_ai,
12904 "Default should allow AI"
12905 );
12906 });
12907
12908 let worktree_id = WorktreeId::from_usize(1);
12909 let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
12910 std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
12911 };
12912 let project_path = rel_path("project");
12913 let settings_location = SettingsLocation {
12914 worktree_id,
12915 path: project_path.as_ref(),
12916 };
12917
12918 // Test: Project-level disable_ai=true should disable AI for files in that project
12919 cx.update_global::<SettingsStore, _>(|store, cx| {
12920 store
12921 .set_local_settings(
12922 worktree_id,
12923 LocalSettingsPath::InWorktree(project_path.clone()),
12924 LocalSettingsKind::Settings,
12925 Some(r#"{ "disable_ai": true }"#),
12926 cx,
12927 )
12928 .unwrap();
12929 });
12930
12931 cx.update(|cx| {
12932 let settings = DisableAiSettings::get(Some(settings_location), cx);
12933 assert!(
12934 settings.disable_ai,
12935 "Project-level disable_ai=true should disable AI for files in that project"
12936 );
12937 // Global should now also be true since project-level disable_ai is merged into global
12938 assert!(
12939 DisableAiSettings::get_global(cx).disable_ai,
12940 "Global setting should be affected by project-level disable_ai=true"
12941 );
12942 });
12943
12944 // Test: Setting project-level to false should allow AI for that project
12945 cx.update_global::<SettingsStore, _>(|store, cx| {
12946 store
12947 .set_local_settings(
12948 worktree_id,
12949 LocalSettingsPath::InWorktree(project_path.clone()),
12950 LocalSettingsKind::Settings,
12951 Some(r#"{ "disable_ai": false }"#),
12952 cx,
12953 )
12954 .unwrap();
12955 });
12956
12957 cx.update(|cx| {
12958 let settings = DisableAiSettings::get(Some(settings_location), cx);
12959 assert!(
12960 !settings.disable_ai,
12961 "Project-level disable_ai=false should allow AI"
12962 );
12963 // Global should also be false now
12964 assert!(
12965 !DisableAiSettings::get_global(cx).disable_ai,
12966 "Global setting should be false when project-level is false"
12967 );
12968 });
12969
12970 // Test: User-level true + project-level false = AI disabled (saturation)
12971 let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
12972 cx.update_global::<SettingsStore, _>(|store, cx| {
12973 store.set_user_settings(&disable_true, cx).unwrap();
12974 store
12975 .set_local_settings(
12976 worktree_id,
12977 LocalSettingsPath::InWorktree(project_path.clone()),
12978 LocalSettingsKind::Settings,
12979 Some(r#"{ "disable_ai": false }"#),
12980 cx,
12981 )
12982 .unwrap();
12983 });
12984
12985 cx.update(|cx| {
12986 let settings = DisableAiSettings::get(Some(settings_location), cx);
12987 assert!(
12988 settings.disable_ai,
12989 "Project-level false cannot override user-level true (SaturatingBool)"
12990 );
12991 });
12992 }
12993}