1#![allow(clippy::format_collect)]
2
3mod color_extractor;
4mod context_server_store;
5mod debugger;
6mod ext_agent_tests;
7mod extension_agent_tests;
8mod git_store;
9mod image_store;
10mod lsp_command;
11mod lsp_store;
12mod manifest_tree;
13mod project_search;
14mod search;
15mod search_history;
16mod signature_help;
17mod task_inventory;
18mod trusted_worktrees;
19mod yarn;
20
21use anyhow::Result;
22use async_trait::async_trait;
23use buffer_diff::{
24 BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
25 assert_hunks,
26};
27use collections::{BTreeSet, HashMap, HashSet};
28use encoding_rs;
29use fs::{FakeFs, PathEventKind};
30use futures::{StreamExt, future};
31use git::{
32 GitHostingProviderRegistry,
33 repository::{RepoPath, repo_path},
34 status::{DiffStat, FileStatus, StatusCode, TrackedStatus},
35};
36use git2::RepositoryInitOptions;
37use gpui::{
38 App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
39 UpdateGlobal,
40};
41use itertools::Itertools;
42use language::{
43 Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
44 DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
45 LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
46 ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
47 language_settings::{LanguageSettingsContent, language_settings},
48 markdown_lang, rust_lang, tree_sitter_typescript,
49};
50use lsp::{
51 CodeActionKind, DEFAULT_LSP_REQUEST_TIMEOUT, DiagnosticSeverity, DocumentChanges,
52 FileOperationFilter, LanguageServerId, LanguageServerName, NumberOrString, TextDocumentEdit,
53 Uri, WillRenameFiles, notification::DidRenameFiles,
54};
55use parking_lot::Mutex;
56use paths::{config_dir, global_gitignore_path, tasks_file};
57use postage::stream::Stream as _;
58use pretty_assertions::{assert_eq, assert_matches};
59use project::{
60 Event, TaskContexts,
61 git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
62 search::{SearchQuery, SearchResult},
63 task_store::{TaskSettingsLocation, TaskStore},
64 *,
65};
66use rand::{Rng as _, rngs::StdRng};
67use serde_json::json;
68use settings::SettingsStore;
69#[cfg(not(windows))]
70use std::os;
71use std::{
72 cell::RefCell,
73 env, mem,
74 num::NonZeroU32,
75 ops::Range,
76 path::{Path, PathBuf},
77 rc::Rc,
78 str::FromStr,
79 sync::{Arc, OnceLock},
80 task::Poll,
81 time::Duration,
82};
83use sum_tree::SumTree;
84use task::{ResolvedTask, ShellKind, TaskContext};
85use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
86use unindent::Unindent as _;
87use util::{
88 TryFutureExt as _, assert_set_eq, maybe, path,
89 paths::{PathMatcher, PathStyle},
90 rel_path::{RelPath, rel_path},
91 test::{TempTree, marked_text_offsets},
92 uri,
93};
94use worktree::WorktreeModelHandle as _;
95
96#[gpui::test]
97async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
98 cx.executor().allow_parking();
99
100 let (tx, mut rx) = futures::channel::mpsc::unbounded();
101 let _thread = std::thread::spawn(move || {
102 #[cfg(not(target_os = "windows"))]
103 std::fs::metadata("/tmp").unwrap();
104 #[cfg(target_os = "windows")]
105 std::fs::metadata("C:/Windows").unwrap();
106 std::thread::sleep(Duration::from_millis(1000));
107 tx.unbounded_send(1).unwrap();
108 });
109 rx.next().await.unwrap();
110}
111
112#[gpui::test]
113async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
114 cx.executor().allow_parking();
115
116 let io_task = smol::unblock(move || {
117 println!("sleeping on thread {:?}", std::thread::current().id());
118 std::thread::sleep(Duration::from_millis(10));
119 1
120 });
121
122 let task = cx.foreground_executor().spawn(async move {
123 io_task.await;
124 });
125
126 task.await;
127}
128
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are opt-in for the user, and thus
// we assume that they are not supported out of the box.
132#[cfg(not(windows))]
133#[gpui::test]
134async fn test_symlinks(cx: &mut gpui::TestAppContext) {
135 init_test(cx);
136 cx.executor().allow_parking();
137
138 let dir = TempTree::new(json!({
139 "root": {
140 "apple": "",
141 "banana": {
142 "carrot": {
143 "date": "",
144 "endive": "",
145 }
146 },
147 "fennel": {
148 "grape": "",
149 }
150 }
151 }));
152
153 let root_link_path = dir.path().join("root_link");
154 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
155 os::unix::fs::symlink(
156 dir.path().join("root/fennel"),
157 dir.path().join("root/finnochio"),
158 )
159 .unwrap();
160
161 let project = Project::test(
162 Arc::new(RealFs::new(None, cx.executor())),
163 [root_link_path.as_ref()],
164 cx,
165 )
166 .await;
167
168 project.update(cx, |project, cx| {
169 let tree = project.worktrees(cx).next().unwrap().read(cx);
170 assert_eq!(tree.file_count(), 5);
171 assert_eq!(
172 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
173 tree.entry_for_path(rel_path("finnochio/grape"))
174 .unwrap()
175 .inode
176 );
177 });
178}
179
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Layout: a root .editorconfig plus Zed project settings, with nested
    // .editorconfig overrides in `b/` and `d/`. The test checks precedence:
    // .editorconfig beats .zed/settings.json, and deeper configs beat
    // shallower ones.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
            "tab_size": 8,
            "hard_tabs": false,
            "ensure_final_newline_on_save": false,
            "remove_trailing_whitespace_on_save": false,
            "preferred_line_length": 64,
            "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "d": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 1
            "#,
            "d.rs": "fn d() {\n D\n}",
        },
        "README.json": "tabs are better\n",
    }));

    // Mirror the on-disk tree into a FakeFs so the project can watch it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_d = settings_for("d/d.rs");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in subdirectory overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
        assert_eq!(Some(settings_d.tab_size), NonZeroU32::new(1));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by the .editorconfig glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
287
#[gpui::test]
async fn test_external_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // .editorconfig files that live *above* the worktree root ("external"
    // configs) should still be discovered and applied, closest match first.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "parent": {
                ".editorconfig": "[*.rs]\nindent_size = 2\n",
                "worktree": {
                    ".editorconfig": "[*.md]\nindent_size = 3\n",
                    "main.rs": "fn main() {}",
                    "README.md": "# README",
                    "other.txt": "other content",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    language_registry.add(markdown_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_rs = settings_for("main.rs");
        let settings_md = settings_for("README.md");
        let settings_txt = settings_for("other.txt");

        // main.rs gets indent_size = 2 from parent's external .editorconfig
        assert_eq!(Some(settings_rs.tab_size), NonZeroU32::new(2));

        // README.md gets indent_size = 3 from internal worktree .editorconfig
        assert_eq!(Some(settings_md.tab_size), NonZeroU32::new(3));

        // other.txt gets indent_size = 4 from grandparent's external .editorconfig
        assert_eq!(Some(settings_txt.tab_size), NonZeroU32::new(4));
    });
}
351
352#[gpui::test]
353async fn test_internal_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
354 init_test(cx);
355
356 let fs = FakeFs::new(cx.executor());
357 fs.insert_tree(
358 path!("/worktree"),
359 json!({
360 ".editorconfig": "[*]\nindent_size = 99\n",
361 "src": {
362 ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
363 "file.rs": "fn main() {}",
364 }
365 }),
366 )
367 .await;
368
369 let project = Project::test(fs, [path!("/worktree").as_ref()], cx).await;
370
371 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
372 language_registry.add(rust_lang());
373
374 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
375
376 cx.executor().run_until_parked();
377
378 cx.update(|cx| {
379 let tree = worktree.read(cx);
380 let file_entry = tree
381 .entry_for_path(rel_path("src/file.rs"))
382 .unwrap()
383 .clone();
384 let file = File::for_entry(file_entry, worktree.clone());
385 let file_language = project
386 .read(cx)
387 .languages()
388 .load_language_for_file_path(file.path.as_std_path());
389 let file_language = cx
390 .foreground_executor()
391 .block_on(file_language)
392 .expect("Failed to get file language");
393 let file = file as _;
394 let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();
395
396 assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
397 });
398}
399
#[gpui::test]
async fn test_external_editorconfig_root_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The worktree's own .editorconfig declares `root = true`, so the upward
    // search for external configs must stop and never read the parent's file.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "worktree": {
                ".editorconfig": "root = true\n[*]\nindent_size = 2\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for file.rs.
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs gets indent_size = 2 from worktree's root config, NOT 99 from parent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(2));
    });
}
445
#[gpui::test]
async fn test_external_editorconfig_root_in_parent_stops_traversal(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // An *external* config (in the parent, above the worktree) that declares
    // `root = true` must also stop the upward traversal, shielding the
    // grandparent's config.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/grandparent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "parent": {
                ".editorconfig": "root = true\n[*]\nindent_size = 4\n",
                "worktree": {
                    "file.rs": "fn main() {}",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/grandparent/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for file.rs.
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs gets indent_size = 4 from parent's root config, NOT 99 from grandparent
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
493
#[gpui::test]
async fn test_external_editorconfig_shared_across_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two sibling worktrees sit under a common parent holding a single
    // external .editorconfig; both worktrees must pick up its settings.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                "file.rs": "fn a() {}",
                ".editorconfig": "[*]\ninsert_final_newline = true\n",
            },
            "worktree_b": {
                "file.rs": "fn b() {}",
                ".editorconfig": "[*]\ninsert_final_newline = false\n",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        for worktree in worktrees {
            let tree = worktree.read(cx);
            // Resolve the effective language settings for file.rs in this worktree.
            let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            let settings =
                language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

            // Both worktrees should get indent_size = 5 from shared parent .editorconfig
            assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
        }
    });
}
555
#[gpui::test]
async fn test_external_editorconfig_not_loaded_without_internal_config(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    // The worktree itself has NO .editorconfig; per the existing behavior
    // under test, external configs are only consulted when an internal
    // config exists.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 99\n",
            "worktree": {
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for file.rs.
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // file.rs should have default tab_size = 4, NOT 99 from parent's external .editorconfig
        // because without an internal .editorconfig, external configs are not loaded
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });
}
603
#[gpui::test]
async fn test_external_editorconfig_modification_triggers_refresh(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Editing an external .editorconfig (outside the worktree root) should be
    // observed and cause the derived language settings to refresh.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 4\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for file.rs.
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test initial settings: tab_size = 4 from parent's external .editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(4));
    });

    // Overwrite the external config on disk with a different indent size.
    fs.atomic_write(
        PathBuf::from(path!("/parent/.editorconfig")),
        "[*]\nindent_size = 8\n".to_owned(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Re-resolve the settings after the external config changed.
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test settings updated: tab_size = 8
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(8));
    });
}
677
#[gpui::test]
async fn test_adding_worktree_discovers_external_editorconfigs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A worktree added after project creation must also discover external
    // .editorconfig files above its root, just like the initial worktree.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 7\n",
            "existing_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "new_worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/existing_worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let worktree = project.read(cx).worktrees(cx).next().unwrap();
        let tree = worktree.read(cx);
        // Resolve the effective language settings for file.rs in the
        // initially-opened worktree.
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test existing worktree has tab_size = 7
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });

    // Add the second worktree to the already-running project.
    let (new_worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_worktree(path!("/parent/new_worktree"), true, cx)
        })
        .await
        .unwrap();

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = new_worktree.read(cx);
        // Resolve the effective language settings for file.rs in the
        // newly-added worktree.
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, new_worktree.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Verify new worktree also has tab_size = 7 from shared parent editorconfig
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(7));
    });
}
754
#[gpui::test]
async fn test_removing_worktree_cleans_up_external_editorconfig(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Removing the only worktree that references an external .editorconfig
    // should drop the cached config and its file watcher from the store.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "[*]\nindent_size = 6\n",
            "worktree": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn main() {}",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/parent/worktree").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        // test_state() exposes (tracked worktree ids, external config paths,
        // watched paths) for inspection.
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test external config is loaded
        assert!(worktree_ids.contains(&worktree_id));
        assert!(!external_paths.is_empty());
        assert!(!watcher_paths.is_empty());
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree state, external configs, and watchers all removed
        assert!(!worktree_ids.contains(&worktree_id));
        assert!(external_paths.is_empty());
        assert!(watcher_paths.is_empty());
    });
}
810
#[gpui::test]
async fn test_shared_external_editorconfig_cleanup_with_multiple_worktrees(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    // Two worktrees share one external .editorconfig. Removing one worktree
    // must NOT evict the shared config while the other still uses it.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/parent"),
        json!({
            ".editorconfig": "root = true\n[*]\nindent_size = 5\n",
            "worktree_a": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn a() {}",
            },
            "worktree_b": {
                ".editorconfig": "[*]\n",
                "file.rs": "fn b() {}",
            }
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [
            path!("/parent/worktree_a").as_ref(),
            path!("/parent/worktree_b").as_ref(),
        ],
        cx,
    )
    .await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    cx.executor().run_until_parked();

    let (worktree_a_id, worktree_b, worktree_b_id) = cx.update(|cx| {
        let worktrees: Vec<_> = project.read(cx).worktrees(cx).collect();
        assert_eq!(worktrees.len(), 2);

        let worktree_a = &worktrees[0];
        let worktree_b = &worktrees[1];
        let worktree_a_id = worktree_a.read(cx).id();
        let worktree_b_id = worktree_b.read(cx).id();
        (worktree_a_id, worktree_b.clone(), worktree_b_id)
    });

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, _) = store.editorconfig_store.read(cx).test_state();

        // Test both worktrees have settings and share external config
        assert!(worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        assert_eq!(external_paths.len(), 1); // single shared external config
    });

    project.update(cx, |project, cx| {
        project.remove_worktree(worktree_a_id, cx);
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let store = cx.global::<SettingsStore>();
        let (worktree_ids, external_paths, watcher_paths) =
            store.editorconfig_store.read(cx).test_state();

        // Test worktree_a is gone but external config remains for worktree_b
        assert!(!worktree_ids.contains(&worktree_a_id));
        assert!(worktree_ids.contains(&worktree_b_id));
        // External config should still exist because worktree_b uses it
        assert_eq!(external_paths.len(), 1);
        assert_eq!(watcher_paths.len(), 1);
    });

    cx.update(|cx| {
        let tree = worktree_b.read(cx);
        // Resolve the effective language settings for file.rs in the
        // surviving worktree.
        let file_entry = tree.entry_for_path(rel_path("file.rs")).unwrap().clone();
        let file = File::for_entry(file_entry, worktree_b.clone());
        let file_language = project
            .read(cx)
            .languages()
            .load_language_for_file_path(file.path.as_std_path());
        let file_language = cx
            .foreground_executor()
            .block_on(file_language)
            .expect("Failed to get file language");
        let file = file as _;
        let settings = language_settings(Some(file_language.name()), Some(&file), cx).into_owned();

        // Test worktree_b still has correct settings
        assert_eq!(Some(settings.tab_size), NonZeroU32::new(5));
    });
}
908
909#[gpui::test]
910async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
911 init_test(cx);
912 cx.update(|cx| {
913 GitHostingProviderRegistry::default_global(cx);
914 git_hosting_providers::init(cx);
915 });
916
917 let fs = FakeFs::new(cx.executor());
918 let str_path = path!("/dir");
919 let path = Path::new(str_path);
920
921 fs.insert_tree(
922 path!("/dir"),
923 json!({
924 ".zed": {
925 "settings.json": r#"{
926 "git_hosting_providers": [
927 {
928 "provider": "gitlab",
929 "base_url": "https://google.com",
930 "name": "foo"
931 }
932 ]
933 }"#
934 },
935 }),
936 )
937 .await;
938
939 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
940 let (_worktree, _) =
941 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
942 cx.executor().run_until_parked();
943
944 cx.update(|cx| {
945 let provider = GitHostingProviderRegistry::global(cx);
946 assert!(
947 provider
948 .list_hosting_providers()
949 .into_iter()
950 .any(|provider| provider.name() == "foo")
951 );
952 });
953
954 fs.atomic_write(
955 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
956 "{}".into(),
957 )
958 .await
959 .unwrap();
960
961 cx.run_until_parked();
962
963 cx.update(|cx| {
964 let provider = GitHostingProviderRegistry::global(cx);
965 assert!(
966 !provider
967 .list_hosting_providers()
968 .into_iter()
969 .any(|provider| provider.name() == "foo")
970 );
971 });
972}
973
974#[gpui::test]
975async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
976 init_test(cx);
977 TaskStore::init(None);
978
979 let fs = FakeFs::new(cx.executor());
980 fs.insert_tree(
981 path!("/dir"),
982 json!({
983 ".zed": {
984 "settings.json": r#"{ "tab_size": 8 }"#,
985 "tasks.json": r#"[{
986 "label": "cargo check all",
987 "command": "cargo",
988 "args": ["check", "--all"]
989 },]"#,
990 },
991 "a": {
992 "a.rs": "fn a() {\n A\n}"
993 },
994 "b": {
995 ".zed": {
996 "settings.json": r#"{ "tab_size": 2 }"#,
997 "tasks.json": r#"[{
998 "label": "cargo check",
999 "command": "cargo",
1000 "args": ["check"]
1001 },]"#,
1002 },
1003 "b.rs": "fn b() {\n B\n}"
1004 }
1005 }),
1006 )
1007 .await;
1008
1009 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
1010 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
1011
1012 cx.executor().run_until_parked();
1013 let worktree_id = cx.update(|cx| {
1014 project.update(cx, |project, cx| {
1015 project.worktrees(cx).next().unwrap().read(cx).id()
1016 })
1017 });
1018
1019 let mut task_contexts = TaskContexts::default();
1020 task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
1021 let task_contexts = Arc::new(task_contexts);
1022
1023 let topmost_local_task_source_kind = TaskSourceKind::Worktree {
1024 id: worktree_id,
1025 directory_in_worktree: rel_path(".zed").into(),
1026 id_base: "local worktree tasks from directory \".zed\"".into(),
1027 };
1028
1029 let all_tasks = cx
1030 .update(|cx| {
1031 let tree = worktree.read(cx);
1032
1033 let file_a = File::for_entry(
1034 tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
1035 worktree.clone(),
1036 ) as _;
1037 let settings_a = language_settings(None, Some(&file_a), cx);
1038 let file_b = File::for_entry(
1039 tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
1040 worktree.clone(),
1041 ) as _;
1042 let settings_b = language_settings(None, Some(&file_b), cx);
1043
1044 assert_eq!(settings_a.tab_size.get(), 8);
1045 assert_eq!(settings_b.tab_size.get(), 2);
1046
1047 get_all_tasks(&project, task_contexts.clone(), cx)
1048 })
1049 .await
1050 .into_iter()
1051 .map(|(source_kind, task)| {
1052 let resolved = task.resolved;
1053 (
1054 source_kind,
1055 task.resolved_label,
1056 resolved.args,
1057 resolved.env,
1058 )
1059 })
1060 .collect::<Vec<_>>();
1061 assert_eq!(
1062 all_tasks,
1063 vec![
1064 (
1065 TaskSourceKind::Worktree {
1066 id: worktree_id,
1067 directory_in_worktree: rel_path("b/.zed").into(),
1068 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1069 },
1070 "cargo check".to_string(),
1071 vec!["check".to_string()],
1072 HashMap::default(),
1073 ),
1074 (
1075 topmost_local_task_source_kind.clone(),
1076 "cargo check all".to_string(),
1077 vec!["check".to_string(), "--all".to_string()],
1078 HashMap::default(),
1079 ),
1080 ]
1081 );
1082
1083 let (_, resolved_task) = cx
1084 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1085 .await
1086 .into_iter()
1087 .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
1088 .expect("should have one global task");
1089 project.update(cx, |project, cx| {
1090 let task_inventory = project
1091 .task_store()
1092 .read(cx)
1093 .task_inventory()
1094 .cloned()
1095 .unwrap();
1096 task_inventory.update(cx, |inventory, _| {
1097 inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
1098 inventory
1099 .update_file_based_tasks(
1100 TaskSettingsLocation::Global(tasks_file()),
1101 Some(
1102 &json!([{
1103 "label": "cargo check unstable",
1104 "command": "cargo",
1105 "args": [
1106 "check",
1107 "--all",
1108 "--all-targets"
1109 ],
1110 "env": {
1111 "RUSTFLAGS": "-Zunstable-options"
1112 }
1113 }])
1114 .to_string(),
1115 ),
1116 )
1117 .unwrap();
1118 });
1119 });
1120 cx.run_until_parked();
1121
1122 let all_tasks = cx
1123 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
1124 .await
1125 .into_iter()
1126 .map(|(source_kind, task)| {
1127 let resolved = task.resolved;
1128 (
1129 source_kind,
1130 task.resolved_label,
1131 resolved.args,
1132 resolved.env,
1133 )
1134 })
1135 .collect::<Vec<_>>();
1136 assert_eq!(
1137 all_tasks,
1138 vec![
1139 (
1140 topmost_local_task_source_kind.clone(),
1141 "cargo check all".to_string(),
1142 vec!["check".to_string(), "--all".to_string()],
1143 HashMap::default(),
1144 ),
1145 (
1146 TaskSourceKind::Worktree {
1147 id: worktree_id,
1148 directory_in_worktree: rel_path("b/.zed").into(),
1149 id_base: "local worktree tasks from directory \"b/.zed\"".into()
1150 },
1151 "cargo check".to_string(),
1152 vec!["check".to_string()],
1153 HashMap::default(),
1154 ),
1155 (
1156 TaskSourceKind::AbsPath {
1157 abs_path: paths::tasks_file().clone(),
1158 id_base: "global tasks.json".into(),
1159 },
1160 "cargo check unstable".to_string(),
1161 vec![
1162 "check".to_string(),
1163 "--all".to_string(),
1164 "--all-targets".to_string(),
1165 ],
1166 HashMap::from_iter(Some((
1167 "RUSTFLAGS".to_string(),
1168 "-Zunstable-options".to_string()
1169 ))),
1170 ),
1171 ]
1172 );
1173}
1174
#[gpui::test]
async fn test_invalid_local_tasks_shows_toast_with_doc_link(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // We need to start with a valid `.zed/tasks.json` file as otherwise the
    // event is emitted before we had a chance to set up the event subscription.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{ "label": "valid task", "command": "echo" }]"#,
            },
            "file.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Flag flipped by the subscription below once the expected toast arrives.
    let saw_toast = Rc::new(RefCell::new(false));

    // Update the `.zed/tasks.json` file with an invalid variable, so we can
    // later assert that the `Event::Toast` event is emitted.
    fs.save(
        path!("/dir/.zed/tasks.json").as_ref(),
        &r#"[{ "label": "test $ZED_FOO", "command": "echo" }]"#.into(),
        Default::default(),
    )
    .await
    .unwrap();

    project.update(cx, |_, cx| {
        let saw_toast = saw_toast.clone();

        cx.subscribe(&project, move |_, _, event: &Event, _| match event {
            Event::Toast {
                notification_id,
                message,
                link: Some(ToastLink { url, .. }),
            } => {
                // The toast should identify the offending local tasks file,
                // mention the unknown variable, and link to the tasks docs.
                assert!(notification_id.starts_with("local-tasks-"));
                assert!(message.contains("ZED_FOO"));
                assert_eq!(*url, "https://zed.dev/docs/tasks");
                *saw_toast.borrow_mut() = true;
            }
            _ => {}
        })
        .detach();
    });

    cx.run_until_parked();
    assert!(
        *saw_toast.borrow(),
        "Expected `Event::Toast` was never emitted"
    );
}
1232
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // A single worktree task whose command references `$ZED_WORKTREE_ROOT`; it
    // can only resolve when a context supplying that variable is available.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // With an active item but no active-worktree context, `$ZED_WORKTREE_ROOT`
    // is undefined, so the task must fail to resolve.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Once the active-worktree context supplies the worktree-root variable, the
    // task resolves and the variable is substituted into the command line.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
1324
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider that roots a Python project at the nearest
    // ancestor directory containing a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walk up at most `depth` ancestors of `path`, returning the first
        // directory for which the delegate reports a `pyproject.toml` file.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // One worktree containing two sub-projects, each rooted by its own
    // `pyproject.toml` and carrying its own `.venv` directory.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the running server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery should be rooted at project-b's `pyproject.toml`.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    // No toolchain has been activated yet for project-b.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // Activating a distinct toolchain for project-b should spawn a second
    // instance of the same server for that subproject's buffers.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
1526
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end check of the language-server lifecycle: start on buffer open,
    // capability-based buffer configuration, change/save/rename notifications
    // routed per language, and restarts reopening the right documents.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake servers with distinct completion triggers so we can tell which
    // server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    // Same-extension rename: the rust server sees a close followed by a reopen
    // under the new URI.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed the renamed buffer with a diagnostic so we can verify below that
    // diagnostics get cleared when the buffer's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers should receive a shutdown request before the restart.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two DidOpen notifications is unspecified, hence the set
    // comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
1929
#[gpui::test]
async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
    // Relative binary paths in LSP settings should resolve against the
    // worktree root when the target exists, and fall back to PATH otherwise.
    init_test(cx);

    let settings_json_contents = json!({
        "languages": {
            "Rust": {
                "language_servers": ["my_fake_lsp", "lsp_on_path"]
            }
        },
        "lsp": {
            "my_fake_lsp": {
                "binary": {
                    // file exists, so this is treated as a relative path
                    // NOTE(review): the tree below creates `my_fake_lsp.exe`,
                    // not `my_fake_lsp_binary.exe` — confirm the existence
                    // check is really exercised by this fixture.
                    "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
                }
            },
            "lsp_on_path": {
                "binary": {
                    // file doesn't exist, so it will fall back on PATH env var
                    "path": path!("lsp_on_path.exe").to_string(),
                }
            }
        },
    });

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": settings_json_contents.to_string(),
            },
            ".relative_path": {
                "to": {
                    "my_fake_lsp.exe": "",
                },
            },
            "src": {
                "main.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    let mut my_fake_lsp = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "my_fake_lsp",
            ..Default::default()
        },
    );
    let mut lsp_on_path = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "lsp_on_path",
            ..Default::default()
        },
    );

    cx.run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    // Relative path is resolved against the worktree root.
    let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
    assert_eq!(
        lsp_path.to_string_lossy(),
        path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
    );

    // Non-existent file is left as-is so the OS can resolve it via PATH.
    let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
    assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
}
2013
2014#[gpui::test]
2015async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
2016 init_test(cx);
2017
2018 let settings_json_contents = json!({
2019 "languages": {
2020 "Rust": {
2021 "language_servers": ["tilde_lsp"]
2022 }
2023 },
2024 "lsp": {
2025 "tilde_lsp": {
2026 "binary": {
2027 "path": "~/.local/bin/rust-analyzer",
2028 }
2029 }
2030 },
2031 });
2032
2033 let fs = FakeFs::new(cx.executor());
2034 fs.insert_tree(
2035 path!("/root"),
2036 json!({
2037 ".zed": {
2038 "settings.json": settings_json_contents.to_string(),
2039 },
2040 "src": {
2041 "main.rs": "fn main() {}",
2042 }
2043 }),
2044 )
2045 .await;
2046
2047 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
2048 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2049 language_registry.add(rust_lang());
2050
2051 let mut tilde_lsp = language_registry.register_fake_lsp(
2052 "Rust",
2053 FakeLspAdapter {
2054 name: "tilde_lsp",
2055 ..Default::default()
2056 },
2057 );
2058 cx.run_until_parked();
2059
2060 project
2061 .update(cx, |project, cx| {
2062 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
2063 })
2064 .await
2065 .unwrap();
2066
2067 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
2068 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
2069 assert_eq!(
2070 lsp_path, expected_path,
2071 "Tilde path should expand to home directory"
2072 );
2073}
2074
#[gpui::test]
async fn test_rescan_fs_change_is_reported_to_language_servers_as_changed(
    cx: &mut gpui::TestAppContext,
) {
    // A `Rescan` fs event on a watched file should be forwarded to the server
    // as a plain CHANGED `workspace/didChangeWatchedFiles` event.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, _lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a Rust buffer.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Register a watcher on `Cargo.lock` and collect every file event the
    // server is notified about.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(
            lsp::RegistrationParams {
                registrations: vec![lsp::Registration {
                    id: Default::default(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: serde_json::to_value(
                        lsp::DidChangeWatchedFilesRegistrationOptions {
                            watchers: vec![lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            }],
                        },
                    )
                    .ok(),
                }],
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
        }
    });

    // Registering the watcher alone must not produce any events.
    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);

    fs.emit_fs_event(path!("/the-root/Cargo.lock"), Some(PathEventKind::Rescan));
    cx.executor().run_until_parked();

    // The rescan is reported as a CHANGED event for the watched path.
    assert_eq!(
        &*file_changes.lock(),
        &[lsp::FileEvent {
            uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
            typ: lsp::FileChangeType::CHANGED,
        }]
    );
}
2165
2166#[gpui::test]
2167async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
2168 init_test(cx);
2169
2170 let fs = FakeFs::new(cx.executor());
2171 fs.insert_tree(
2172 path!("/the-root"),
2173 json!({
2174 ".gitignore": "target\n",
2175 "Cargo.lock": "",
2176 "src": {
2177 "a.rs": "",
2178 "b.rs": "",
2179 },
2180 "target": {
2181 "x": {
2182 "out": {
2183 "x.rs": ""
2184 }
2185 },
2186 "y": {
2187 "out": {
2188 "y.rs": "",
2189 }
2190 },
2191 "z": {
2192 "out": {
2193 "z.rs": ""
2194 }
2195 }
2196 }
2197 }),
2198 )
2199 .await;
2200 fs.insert_tree(
2201 path!("/the-registry"),
2202 json!({
2203 "dep1": {
2204 "src": {
2205 "dep1.rs": "",
2206 }
2207 },
2208 "dep2": {
2209 "src": {
2210 "dep2.rs": "",
2211 }
2212 },
2213 }),
2214 )
2215 .await;
2216 fs.insert_tree(
2217 path!("/the/stdlib"),
2218 json!({
2219 "LICENSE": "",
2220 "src": {
2221 "string.rs": "",
2222 }
2223 }),
2224 )
2225 .await;
2226
2227 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
2228 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
2229 (project.languages().clone(), project.lsp_store())
2230 });
2231 language_registry.add(rust_lang());
2232 let mut fake_servers = language_registry.register_fake_lsp(
2233 "Rust",
2234 FakeLspAdapter {
2235 name: "the-language-server",
2236 ..Default::default()
2237 },
2238 );
2239
2240 cx.executor().run_until_parked();
2241
2242 // Start the language server by opening a buffer with a compatible file extension.
2243 project
2244 .update(cx, |project, cx| {
2245 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
2246 })
2247 .await
2248 .unwrap();
2249
2250 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
2251 project.update(cx, |project, cx| {
2252 let worktree = project.worktrees(cx).next().unwrap();
2253 assert_eq!(
2254 worktree
2255 .read(cx)
2256 .snapshot()
2257 .entries(true, 0)
2258 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2259 .collect::<Vec<_>>(),
2260 &[
2261 ("", false),
2262 (".gitignore", false),
2263 ("Cargo.lock", false),
2264 ("src", false),
2265 ("src/a.rs", false),
2266 ("src/b.rs", false),
2267 ("target", true),
2268 ]
2269 );
2270 });
2271
2272 let prev_read_dir_count = fs.read_dir_call_count();
2273
2274 let fake_server = fake_servers.next().await.unwrap();
2275 cx.executor().run_until_parked();
2276 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
2277 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
2278 id
2279 });
2280
2281 // Simulate jumping to a definition in a dependency outside of the worktree.
2282 let _out_of_worktree_buffer = project
2283 .update(cx, |project, cx| {
2284 project.open_local_buffer_via_lsp(
2285 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
2286 server_id,
2287 cx,
2288 )
2289 })
2290 .await
2291 .unwrap();
2292
2293 // Keep track of the FS events reported to the language server.
2294 let file_changes = Arc::new(Mutex::new(Vec::new()));
2295 fake_server
2296 .request::<lsp::request::RegisterCapability>(
2297 lsp::RegistrationParams {
2298 registrations: vec![lsp::Registration {
2299 id: Default::default(),
2300 method: "workspace/didChangeWatchedFiles".to_string(),
2301 register_options: serde_json::to_value(
2302 lsp::DidChangeWatchedFilesRegistrationOptions {
2303 watchers: vec![
2304 lsp::FileSystemWatcher {
2305 glob_pattern: lsp::GlobPattern::String(
2306 path!("/the-root/Cargo.toml").to_string(),
2307 ),
2308 kind: None,
2309 },
2310 lsp::FileSystemWatcher {
2311 glob_pattern: lsp::GlobPattern::String(
2312 path!("/the-root/src/*.{rs,c}").to_string(),
2313 ),
2314 kind: None,
2315 },
2316 lsp::FileSystemWatcher {
2317 glob_pattern: lsp::GlobPattern::String(
2318 path!("/the-root/target/y/**/*.rs").to_string(),
2319 ),
2320 kind: None,
2321 },
2322 lsp::FileSystemWatcher {
2323 glob_pattern: lsp::GlobPattern::String(
2324 path!("/the/stdlib/src/**/*.rs").to_string(),
2325 ),
2326 kind: None,
2327 },
2328 lsp::FileSystemWatcher {
2329 glob_pattern: lsp::GlobPattern::String(
2330 path!("**/Cargo.lock").to_string(),
2331 ),
2332 kind: None,
2333 },
2334 ],
2335 },
2336 )
2337 .ok(),
2338 }],
2339 },
2340 DEFAULT_LSP_REQUEST_TIMEOUT,
2341 )
2342 .await
2343 .into_response()
2344 .unwrap();
2345 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
2346 let file_changes = file_changes.clone();
2347 move |params, _| {
2348 let mut file_changes = file_changes.lock();
2349 file_changes.extend(params.changes);
2350 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
2351 }
2352 });
2353
2354 cx.executor().run_until_parked();
2355 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
2356 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
2357
2358 let mut new_watched_paths = fs.watched_paths();
2359 new_watched_paths.retain(|path| {
2360 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
2361 });
2362 assert_eq!(
2363 &new_watched_paths,
2364 &[
2365 Path::new(path!("/the-root")),
2366 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
2367 Path::new(path!("/the/stdlib/src"))
2368 ]
2369 );
2370
2371 // Now the language server has asked us to watch an ignored directory path,
2372 // so we recursively load it.
2373 project.update(cx, |project, cx| {
2374 let worktree = project.visible_worktrees(cx).next().unwrap();
2375 assert_eq!(
2376 worktree
2377 .read(cx)
2378 .snapshot()
2379 .entries(true, 0)
2380 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
2381 .collect::<Vec<_>>(),
2382 &[
2383 ("", false),
2384 (".gitignore", false),
2385 ("Cargo.lock", false),
2386 ("src", false),
2387 ("src/a.rs", false),
2388 ("src/b.rs", false),
2389 ("target", true),
2390 ("target/x", true),
2391 ("target/y", true),
2392 ("target/y/out", true),
2393 ("target/y/out/y.rs", true),
2394 ("target/z", true),
2395 ]
2396 );
2397 });
2398
2399 // Perform some file system mutations, two of which match the watched patterns,
2400 // and one of which does not.
2401 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
2402 .await
2403 .unwrap();
2404 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
2405 .await
2406 .unwrap();
2407 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
2408 .await
2409 .unwrap();
2410 fs.create_file(
2411 path!("/the-root/target/x/out/x2.rs").as_ref(),
2412 Default::default(),
2413 )
2414 .await
2415 .unwrap();
2416 fs.create_file(
2417 path!("/the-root/target/y/out/y2.rs").as_ref(),
2418 Default::default(),
2419 )
2420 .await
2421 .unwrap();
2422 fs.save(
2423 path!("/the-root/Cargo.lock").as_ref(),
2424 &"".into(),
2425 Default::default(),
2426 )
2427 .await
2428 .unwrap();
2429 fs.save(
2430 path!("/the-stdlib/LICENSE").as_ref(),
2431 &"".into(),
2432 Default::default(),
2433 )
2434 .await
2435 .unwrap();
2436 fs.save(
2437 path!("/the/stdlib/src/string.rs").as_ref(),
2438 &"".into(),
2439 Default::default(),
2440 )
2441 .await
2442 .unwrap();
2443
2444 // The language server receives events for the FS mutations that match its watch patterns.
2445 cx.executor().run_until_parked();
2446 assert_eq!(
2447 &*file_changes.lock(),
2448 &[
2449 lsp::FileEvent {
2450 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
2451 typ: lsp::FileChangeType::CHANGED,
2452 },
2453 lsp::FileEvent {
2454 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
2455 typ: lsp::FileChangeType::DELETED,
2456 },
2457 lsp::FileEvent {
2458 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
2459 typ: lsp::FileChangeType::CREATED,
2460 },
2461 lsp::FileEvent {
2462 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
2463 typ: lsp::FileChangeType::CREATED,
2464 },
2465 lsp::FileEvent {
2466 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
2467 typ: lsp::FileChangeType::CHANGED,
2468 },
2469 ]
2470 );
2471}
2472
2473#[gpui::test]
2474async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
2475 init_test(cx);
2476
2477 let fs = FakeFs::new(cx.executor());
2478 fs.insert_tree(
2479 path!("/dir"),
2480 json!({
2481 "a.rs": "let a = 1;",
2482 "b.rs": "let b = 2;"
2483 }),
2484 )
2485 .await;
2486
2487 let project = Project::test(
2488 fs,
2489 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
2490 cx,
2491 )
2492 .await;
2493 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2494
2495 let buffer_a = project
2496 .update(cx, |project, cx| {
2497 project.open_local_buffer(path!("/dir/a.rs"), cx)
2498 })
2499 .await
2500 .unwrap();
2501 let buffer_b = project
2502 .update(cx, |project, cx| {
2503 project.open_local_buffer(path!("/dir/b.rs"), cx)
2504 })
2505 .await
2506 .unwrap();
2507
2508 lsp_store.update(cx, |lsp_store, cx| {
2509 lsp_store
2510 .update_diagnostics(
2511 LanguageServerId(0),
2512 lsp::PublishDiagnosticsParams {
2513 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2514 version: None,
2515 diagnostics: vec![lsp::Diagnostic {
2516 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2517 severity: Some(lsp::DiagnosticSeverity::ERROR),
2518 message: "error 1".to_string(),
2519 ..Default::default()
2520 }],
2521 },
2522 None,
2523 DiagnosticSourceKind::Pushed,
2524 &[],
2525 cx,
2526 )
2527 .unwrap();
2528 lsp_store
2529 .update_diagnostics(
2530 LanguageServerId(0),
2531 lsp::PublishDiagnosticsParams {
2532 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
2533 version: None,
2534 diagnostics: vec![lsp::Diagnostic {
2535 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2536 severity: Some(DiagnosticSeverity::WARNING),
2537 message: "error 2".to_string(),
2538 ..Default::default()
2539 }],
2540 },
2541 None,
2542 DiagnosticSourceKind::Pushed,
2543 &[],
2544 cx,
2545 )
2546 .unwrap();
2547 });
2548
2549 buffer_a.update(cx, |buffer, _| {
2550 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2551 assert_eq!(
2552 chunks
2553 .iter()
2554 .map(|(s, d)| (s.as_str(), *d))
2555 .collect::<Vec<_>>(),
2556 &[
2557 ("let ", None),
2558 ("a", Some(DiagnosticSeverity::ERROR)),
2559 (" = 1;", None),
2560 ]
2561 );
2562 });
2563 buffer_b.update(cx, |buffer, _| {
2564 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2565 assert_eq!(
2566 chunks
2567 .iter()
2568 .map(|(s, d)| (s.as_str(), *d))
2569 .collect::<Vec<_>>(),
2570 &[
2571 ("let ", None),
2572 ("b", Some(DiagnosticSeverity::WARNING)),
2573 (" = 2;", None),
2574 ]
2575 );
2576 });
2577}
2578
2579#[gpui::test]
2580async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
2581 init_test(cx);
2582
2583 let fs = FakeFs::new(cx.executor());
2584 fs.insert_tree(
2585 path!("/root"),
2586 json!({
2587 "dir": {
2588 ".git": {
2589 "HEAD": "ref: refs/heads/main",
2590 },
2591 ".gitignore": "b.rs",
2592 "a.rs": "let a = 1;",
2593 "b.rs": "let b = 2;",
2594 },
2595 "other.rs": "let b = c;"
2596 }),
2597 )
2598 .await;
2599
2600 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
2601 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2602 let (worktree, _) = project
2603 .update(cx, |project, cx| {
2604 project.find_or_create_worktree(path!("/root/dir"), true, cx)
2605 })
2606 .await
2607 .unwrap();
2608 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
2609
2610 let (worktree, _) = project
2611 .update(cx, |project, cx| {
2612 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
2613 })
2614 .await
2615 .unwrap();
2616 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
2617
2618 let server_id = LanguageServerId(0);
2619 lsp_store.update(cx, |lsp_store, cx| {
2620 lsp_store
2621 .update_diagnostics(
2622 server_id,
2623 lsp::PublishDiagnosticsParams {
2624 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
2625 version: None,
2626 diagnostics: vec![lsp::Diagnostic {
2627 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
2628 severity: Some(lsp::DiagnosticSeverity::ERROR),
2629 message: "unused variable 'b'".to_string(),
2630 ..Default::default()
2631 }],
2632 },
2633 None,
2634 DiagnosticSourceKind::Pushed,
2635 &[],
2636 cx,
2637 )
2638 .unwrap();
2639 lsp_store
2640 .update_diagnostics(
2641 server_id,
2642 lsp::PublishDiagnosticsParams {
2643 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
2644 version: None,
2645 diagnostics: vec![lsp::Diagnostic {
2646 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
2647 severity: Some(lsp::DiagnosticSeverity::ERROR),
2648 message: "unknown variable 'c'".to_string(),
2649 ..Default::default()
2650 }],
2651 },
2652 None,
2653 DiagnosticSourceKind::Pushed,
2654 &[],
2655 cx,
2656 )
2657 .unwrap();
2658 });
2659
2660 let main_ignored_buffer = project
2661 .update(cx, |project, cx| {
2662 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
2663 })
2664 .await
2665 .unwrap();
2666 main_ignored_buffer.update(cx, |buffer, _| {
2667 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2668 assert_eq!(
2669 chunks
2670 .iter()
2671 .map(|(s, d)| (s.as_str(), *d))
2672 .collect::<Vec<_>>(),
2673 &[
2674 ("let ", None),
2675 ("b", Some(DiagnosticSeverity::ERROR)),
2676 (" = 2;", None),
2677 ],
2678 "Gigitnored buffers should still get in-buffer diagnostics",
2679 );
2680 });
2681 let other_buffer = project
2682 .update(cx, |project, cx| {
2683 project.open_buffer((other_worktree_id, rel_path("")), cx)
2684 })
2685 .await
2686 .unwrap();
2687 other_buffer.update(cx, |buffer, _| {
2688 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2689 assert_eq!(
2690 chunks
2691 .iter()
2692 .map(|(s, d)| (s.as_str(), *d))
2693 .collect::<Vec<_>>(),
2694 &[
2695 ("let b = ", None),
2696 ("c", Some(DiagnosticSeverity::ERROR)),
2697 (";", None),
2698 ],
2699 "Buffers from hidden projects should still get in-buffer diagnostics"
2700 );
2701 });
2702
2703 project.update(cx, |project, cx| {
2704 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
2705 assert_eq!(
2706 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
2707 vec![(
2708 ProjectPath {
2709 worktree_id: main_worktree_id,
2710 path: rel_path("b.rs").into(),
2711 },
2712 server_id,
2713 DiagnosticSummary {
2714 error_count: 1,
2715 warning_count: 0,
2716 }
2717 )]
2718 );
2719 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
2720 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
2721 });
2722}
2723
#[gpui::test]
// Verifies the ordered stream of project events around a language server's
// disk-based diagnostics pass: `LanguageServerAdded`, then
// `DiskBasedDiagnosticsStarted` when progress begins under the designated
// token, `DiagnosticsUpdated` on publish, and `DiskBasedDiagnosticsFinished`
// when progress ends. Also checks that publishing empty diagnostics twice in
// a row emits only one update event.
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Progress reported under this token is interpreted as the server's
    // disk-based diagnostics pass (see `disk_based_diagnostics_progress_token`).
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Subscribe to project events; assertions below consume them in order.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token surfaces as
    // `DiskBasedDiagnosticsStarted`.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // The published diagnostic is attached to the buffer once opened.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Second empty publish: the diagnostics did not change, so no event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2859
#[gpui::test]
// Restarting a language server while its disk-based diagnostics pass is still
// running should tear down the old server (id 0) and treat the replacement
// (id 1) as the sole source of diagnostics progress — the old server's
// never-ended progress must not keep the project in a "running" state.
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    // Subscribe after requesting the restart; the first observed events are
    // the removal of the old server and the addition of the new one.
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The already-open buffer gets re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is considered to be running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2961
2962#[gpui::test]
2963async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2964 init_test(cx);
2965
2966 let fs = FakeFs::new(cx.executor());
2967 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2968
2969 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2970
2971 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2972 language_registry.add(rust_lang());
2973 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2974
2975 let (buffer, _) = project
2976 .update(cx, |project, cx| {
2977 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2978 })
2979 .await
2980 .unwrap();
2981
2982 // Publish diagnostics
2983 let fake_server = fake_servers.next().await.unwrap();
2984 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2985 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2986 version: None,
2987 diagnostics: vec![lsp::Diagnostic {
2988 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2989 severity: Some(lsp::DiagnosticSeverity::ERROR),
2990 message: "the message".to_string(),
2991 ..Default::default()
2992 }],
2993 });
2994
2995 cx.executor().run_until_parked();
2996 buffer.update(cx, |buffer, _| {
2997 assert_eq!(
2998 buffer
2999 .snapshot()
3000 .diagnostics_in_range::<_, usize>(0..1, false)
3001 .map(|entry| entry.diagnostic.message.clone())
3002 .collect::<Vec<_>>(),
3003 ["the message".to_string()]
3004 );
3005 });
3006 project.update(cx, |project, cx| {
3007 assert_eq!(
3008 project.diagnostic_summary(false, cx),
3009 DiagnosticSummary {
3010 error_count: 1,
3011 warning_count: 0,
3012 }
3013 );
3014 });
3015
3016 project.update(cx, |project, cx| {
3017 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3018 });
3019
3020 // The diagnostics are cleared.
3021 cx.executor().run_until_parked();
3022 buffer.update(cx, |buffer, _| {
3023 assert_eq!(
3024 buffer
3025 .snapshot()
3026 .diagnostics_in_range::<_, usize>(0..1, false)
3027 .map(|entry| entry.diagnostic.message.clone())
3028 .collect::<Vec<_>>(),
3029 Vec::<String>::new(),
3030 );
3031 });
3032 project.update(cx, |project, cx| {
3033 assert_eq!(
3034 project.diagnostic_summary(false, cx),
3035 DiagnosticSummary {
3036 error_count: 0,
3037 warning_count: 0,
3038 }
3039 );
3040 });
3041}
3042
3043#[gpui::test]
3044async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
3045 init_test(cx);
3046
3047 let fs = FakeFs::new(cx.executor());
3048 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
3049
3050 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3051 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3052
3053 language_registry.add(rust_lang());
3054 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
3055
3056 let (buffer, _handle) = project
3057 .update(cx, |project, cx| {
3058 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3059 })
3060 .await
3061 .unwrap();
3062
3063 // Before restarting the server, report diagnostics with an unknown buffer version.
3064 let fake_server = fake_servers.next().await.unwrap();
3065 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3066 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
3067 version: Some(10000),
3068 diagnostics: Vec::new(),
3069 });
3070 cx.executor().run_until_parked();
3071 project.update(cx, |project, cx| {
3072 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
3073 });
3074
3075 let mut fake_server = fake_servers.next().await.unwrap();
3076 let notification = fake_server
3077 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3078 .await
3079 .text_document;
3080 assert_eq!(notification.version, 0);
3081}
3082
#[gpui::test]
// Cancelling language-server work for a buffer should send a
// `window/workDoneProgress/cancel` notification only for progress that the
// server declared cancellable — the non-cancellable token must be left alone.
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First: a NON-cancellable progress under an unrelated token. Cancelling
    // work later must not target this one.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second: a cancellable progress under the disk-based diagnostics token.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
            DEFAULT_LSP_REQUEST_TIMEOUT,
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Only the cancellable token receives a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
3154
3155#[gpui::test]
3156async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
3157 init_test(cx);
3158
3159 let fs = FakeFs::new(cx.executor());
3160 fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
3161 .await;
3162
3163 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3164 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3165
3166 let mut fake_rust_servers = language_registry.register_fake_lsp(
3167 "Rust",
3168 FakeLspAdapter {
3169 name: "rust-lsp",
3170 ..Default::default()
3171 },
3172 );
3173 let mut fake_js_servers = language_registry.register_fake_lsp(
3174 "JavaScript",
3175 FakeLspAdapter {
3176 name: "js-lsp",
3177 ..Default::default()
3178 },
3179 );
3180 language_registry.add(rust_lang());
3181 language_registry.add(js_lang());
3182
3183 let _rs_buffer = project
3184 .update(cx, |project, cx| {
3185 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
3186 })
3187 .await
3188 .unwrap();
3189 let _js_buffer = project
3190 .update(cx, |project, cx| {
3191 project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
3192 })
3193 .await
3194 .unwrap();
3195
3196 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
3197 assert_eq!(
3198 fake_rust_server_1
3199 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3200 .await
3201 .text_document
3202 .uri
3203 .as_str(),
3204 uri!("file:///dir/a.rs")
3205 );
3206
3207 let mut fake_js_server = fake_js_servers.next().await.unwrap();
3208 assert_eq!(
3209 fake_js_server
3210 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3211 .await
3212 .text_document
3213 .uri
3214 .as_str(),
3215 uri!("file:///dir/b.js")
3216 );
3217
3218 // Disable Rust language server, ensuring only that server gets stopped.
3219 cx.update(|cx| {
3220 SettingsStore::update_global(cx, |settings, cx| {
3221 settings.update_user_settings(cx, |settings| {
3222 settings.languages_mut().insert(
3223 "Rust".into(),
3224 LanguageSettingsContent {
3225 enable_language_server: Some(false),
3226 ..Default::default()
3227 },
3228 );
3229 });
3230 })
3231 });
3232 fake_rust_server_1
3233 .receive_notification::<lsp::notification::Exit>()
3234 .await;
3235
3236 // Enable Rust and disable JavaScript language servers, ensuring that the
3237 // former gets started again and that the latter stops.
3238 cx.update(|cx| {
3239 SettingsStore::update_global(cx, |settings, cx| {
3240 settings.update_user_settings(cx, |settings| {
3241 settings.languages_mut().insert(
3242 "Rust".into(),
3243 LanguageSettingsContent {
3244 enable_language_server: Some(true),
3245 ..Default::default()
3246 },
3247 );
3248 settings.languages_mut().insert(
3249 "JavaScript".into(),
3250 LanguageSettingsContent {
3251 enable_language_server: Some(false),
3252 ..Default::default()
3253 },
3254 );
3255 });
3256 })
3257 });
3258 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
3259 assert_eq!(
3260 fake_rust_server_2
3261 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3262 .await
3263 .text_document
3264 .uri
3265 .as_str(),
3266 uri!("file:///dir/a.rs")
3267 );
3268 fake_js_server
3269 .receive_notification::<lsp::notification::Exit>()
3270 .await;
3271}
3272
// Verifies that diagnostics pushed by a language server against an *older*
// document version are transformed through the buffer edits made since that
// version: ranges move with the text, overlapping diagnostics coexist, and a
// report carrying stale positions still resolves to the right current ranges.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // "disk" is registered as a disk-based diagnostics source, so diagnostics
    // whose `source` is "disk" are expected back with `is_disk_based: true`.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // The two leading newlines inserted above shift each diagnostic by two rows.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        // Chunk iteration should report the severity of each highlighted span.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A range that starts/ends inside a diagnostic clips the highlighted text.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Both overlapping entries are kept; the wider warning sorts first.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the more severe ERROR wins the
        // chunk highlight; the remainder of the warning range stays WARNING.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Entries come back sorted by position, not in the order they were sent.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
3564
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// extended forward to cover the following character, and an empty range at
// end-of-line is extended backward to cover the preceding character, so the
// diagnostic is always visible.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two =\n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Inject two zero-width diagnostics directly into the LSP store: one in
    // the middle of line 0, one at the end of line 1.
    project.update(cx, |project, cx| {
        project.lsp_store().update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from(path!("/dir/a.rs")),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
3642
// Verifies that diagnostics reported by different language servers for the
// same file are tracked independently: two servers each reporting one error
// on the same range yields a summary of two errors, not one.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    lsp_store.update(cx, |lsp_store, cx| {
        // First server (id 0) reports an error on the first word.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // Second server (id 1) reports a different error on the same range.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' diagnostics are counted in the summary.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
3703
// Verifies that LSP edits computed against a *past* document version are
// correctly rebased onto the current buffer: the buffer is edited after the
// server snapshots it, then edits targeting the old version are interpreted
// so that they land where the referenced text now lives.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the LSP edits
    // below will be tagged with this (now stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    // Positions in these edits refer to `lsp_document_version`, i.e. the text
    // before the three comment insertions above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits must preserve the interleaved comments while
    // performing the server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}
3858
// Verifies that a large, redundant LSP diff (delete-everything / reinsert
// style, as produced by rust-analyzer's merge-imports action) is minimized
// into a small set of edits that only touch what actually changed.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four raw LSP edits collapse into just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
3969
// Verifies tolerance of spec-violating LSP edits: an insertion at the same
// position *after* a replacement (the LSP spec requires insertions to come
// first) is still applied sensibly rather than being dropped or misplaced.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The import insertion lands before the (no-op) replacement of "Path".
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
4025
// Verifies resilience to malformed LSP edits: unordered edits, an inverted
// range (start after end), and a range pointing past the end of the file are
// all normalized, and the result still minimizes to the correct final text.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is beyond the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the edits minimize to the same two
        // edits as the well-formed merge-imports case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
4132
4133fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
4134 buffer: &Buffer,
4135 range: Range<T>,
4136) -> Vec<(String, Option<DiagnosticSeverity>)> {
4137 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
4138 for chunk in buffer.snapshot().chunks(range, true) {
4139 if chunks
4140 .last()
4141 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
4142 {
4143 chunks.last_mut().unwrap().0.push_str(chunk.text);
4144 } else {
4145 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
4146 }
4147 }
4148 chunks
4149}
4150
// Verifies go-to-definition into a file outside the project: the target file
// is loaded into an invisible worktree that is released (and removed from the
// worktree list) once the definition handle is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs exists on disk but is not opened.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The fake server resolves the definition to a location in a.rs.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs now appears as an extra, *invisible* worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition released the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: snapshot the project's worktrees as (abs_path, is_visible) pairs.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
4251
// Verifies completion resolution when the server supplies an explicit
// `text_edit`: its range and new text take precedence over both the
// completion's `insert_text` and its `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server item carries all three sources of completion text; only the
    // text_edit's "textEditText" (replacing the trailing "fqn") should win.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
4335
// Verifies completion handling with LSP 3.17 `itemDefaults.editRange`: when an
// item has no `text_edit` of its own, the list-level default edit range is used,
// paired with `text_edit_text` when present and falling back to `label` (not
// `insert_text`) when it is not.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // text_edit_text is combined with the default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit range present, the fallback text is the label —
        // insert_text ("irrelevant") is ignored.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
4473
// Verifies how completion items are turned into (new_text, replace_range)
// when the server supplies neither a per-item `text_edit` nor a list-level
// default `edit_range`.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript server advertising completion support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request before installing the handler; the
    // handler is consumed once via `.next().await` below.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // `insert_text` wins over the label; the replace range covers the word
    // before the cursor ("fqn", 3 chars).
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // With no insert_text either, the label itself is used; the range covers
    // the word before the cursor ("cmp"), excluding the closing quote.
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
4580
// Verifies that carriage returns in a server-provided `insert_text` are
// normalized to plain `\n` in the resulting completion's `new_text`.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript server advertising completion support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Start the request first; the one-shot handler below answers it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mix of lone `\r` and `\r\n` line endings.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both `\r` and `\r\n` become `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
4649
// Verifies the full command-based code-action flow: a code action resolves to
// a command (no edits), executing the command makes the server send the
// client a `workspace/applyEdit` request, and those edits end up in the
// project transaction returned by `apply_code_action`.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Fake server advertising code-action resolution and one executable command.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`).
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Insert "X" at the start of a.ts via a server-initiated
                    // `workspace/applyEdit`.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                            DEFAULT_LSP_REQUEST_TIMEOUT,
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction's edit is undoable like a normal buffer edit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
4793
// Verifies that renaming a worktree entry to a path whose parent directories
// don't exist yet creates the whole hierarchy, and that a subsequent rename
// into an already-existing directory also succeeds. Contents must be
// preserved across both moves.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    // Look up the worktree and the entry id of the file we'll move.
    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move into a three-level-deep directory that doesn't exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Re-resolve the entry id: the move may have assigned a new one.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Now move up one level, into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
4901
4902#[gpui::test(iterations = 10)]
4903async fn test_save_file(cx: &mut gpui::TestAppContext) {
4904 init_test(cx);
4905
4906 let fs = FakeFs::new(cx.executor());
4907 fs.insert_tree(
4908 path!("/dir"),
4909 json!({
4910 "file1": "the old contents",
4911 }),
4912 )
4913 .await;
4914
4915 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4916 let buffer = project
4917 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4918 .await
4919 .unwrap();
4920 buffer.update(cx, |buffer, cx| {
4921 assert_eq!(buffer.text(), "the old contents");
4922 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4923 });
4924
4925 project
4926 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4927 .await
4928 .unwrap();
4929
4930 let new_text = fs
4931 .load(Path::new(path!("/dir/file1")))
4932 .await
4933 .unwrap()
4934 .replace("\r\n", "\n");
4935 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4936}
4937
// Regression test: saving an untitled buffer under a name that matches a
// language (here `file.rs` -> Rust) must start the corresponding language
// server and register the buffer with it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; with no file it has no language, so no
    // language server is associated with it yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(None, false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save it as `file.rs`, giving it a Rust file association.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer is associated with the new server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
5017
5018#[gpui::test(iterations = 30)]
5019async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
5020 init_test(cx);
5021
5022 let fs = FakeFs::new(cx.executor());
5023 fs.insert_tree(
5024 path!("/dir"),
5025 json!({
5026 "file1": "the original contents",
5027 }),
5028 )
5029 .await;
5030
5031 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5032 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5033 let buffer = project
5034 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5035 .await
5036 .unwrap();
5037
5038 // Change the buffer's file on disk, and then wait for the file change
5039 // to be detected by the worktree, so that the buffer starts reloading.
5040 fs.save(
5041 path!("/dir/file1").as_ref(),
5042 &"the first contents".into(),
5043 Default::default(),
5044 )
5045 .await
5046 .unwrap();
5047 worktree.next_event(cx).await;
5048
5049 // Change the buffer's file again. Depending on the random seed, the
5050 // previous file change may still be in progress.
5051 fs.save(
5052 path!("/dir/file1").as_ref(),
5053 &"the second contents".into(),
5054 Default::default(),
5055 )
5056 .await
5057 .unwrap();
5058 worktree.next_event(cx).await;
5059
5060 cx.executor().run_until_parked();
5061 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5062 buffer.read_with(cx, |buffer, _| {
5063 assert_eq!(buffer.text(), on_disk_text);
5064 assert!(!buffer.is_dirty(), "buffer should not be dirty");
5065 assert!(!buffer.has_conflict(), "buffer should not be dirty");
5066 });
5067}
5068
5069#[gpui::test(iterations = 30)]
5070async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
5071 init_test(cx);
5072
5073 let fs = FakeFs::new(cx.executor());
5074 fs.insert_tree(
5075 path!("/dir"),
5076 json!({
5077 "file1": "the original contents",
5078 }),
5079 )
5080 .await;
5081
5082 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5083 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
5084 let buffer = project
5085 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5086 .await
5087 .unwrap();
5088
5089 // Change the buffer's file on disk, and then wait for the file change
5090 // to be detected by the worktree, so that the buffer starts reloading.
5091 fs.save(
5092 path!("/dir/file1").as_ref(),
5093 &"the first contents".into(),
5094 Default::default(),
5095 )
5096 .await
5097 .unwrap();
5098 worktree.next_event(cx).await;
5099
5100 cx.executor()
5101 .spawn(cx.executor().simulate_random_delay())
5102 .await;
5103
5104 // Perform a noop edit, causing the buffer's version to increase.
5105 buffer.update(cx, |buffer, cx| {
5106 buffer.edit([(0..0, " ")], None, cx);
5107 buffer.undo(cx);
5108 });
5109
5110 cx.executor().run_until_parked();
5111 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
5112 buffer.read_with(cx, |buffer, _| {
5113 let buffer_text = buffer.text();
5114 if buffer_text == on_disk_text {
5115 assert!(
5116 !buffer.is_dirty() && !buffer.has_conflict(),
5117 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
5118 );
5119 }
5120 // If the file change occurred while the buffer was processing the first
5121 // change, the buffer will be in a conflicting state.
5122 else {
5123 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5124 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
5125 }
5126 });
5127}
5128
5129#[gpui::test]
5130async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5131 init_test(cx);
5132
5133 let fs = FakeFs::new(cx.executor());
5134 fs.insert_tree(
5135 path!("/dir"),
5136 json!({
5137 "file1": "the old contents",
5138 }),
5139 )
5140 .await;
5141
5142 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
5143 let buffer = project
5144 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5145 .await
5146 .unwrap();
5147 buffer.update(cx, |buffer, cx| {
5148 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
5149 });
5150
5151 project
5152 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
5153 .await
5154 .unwrap();
5155
5156 let new_text = fs
5157 .load(Path::new(path!("/dir/file1")))
5158 .await
5159 .unwrap()
5160 .replace("\r\n", "\n");
5161 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
5162}
5163
// "Save as" on an untitled buffer: the buffer acquires the new file, becomes
// clean, and is re-languaged based on the chosen extension; opening the same
// path afterwards yields the very same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts as Plain Text and becomes dirty on edit.
    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text");
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // Saving with a `.rs` extension switched the language to Rust.
        assert_eq!(buffer.language().unwrap().name(), "Rust");
    });

    // Opening the saved path must dedupe to the same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
5217
// "Save as" on a buffer that already has a file: the buffer re-associates
// with the new path, and the original file on disk keeps its old contents.
#[gpui::test]
async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    fs.insert_tree(
        path!("/dir"),
        json!({
            "data_a.txt": "data about a"
        }),
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    // Edit the final character ("a" -> "b") before saving under a new name.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(11..12, "b")], None, cx);
    });

    // Save buffer's contents as a new file and confirm that the buffer's now
    // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
    // file associated with the buffer has now been updated to `data_b.txt`
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let new_path = ProjectPath {
                worktree_id,
                path: rel_path("data_b.txt").into(),
            };

            project.save_buffer_as(buffer.clone(), new_path, cx)
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_b.txt")
        )
    });

    // Open the original `data_a.txt` file, confirming that its contents are
    // unchanged and the resulting buffer's associated file is `data_a.txt`.
    let original_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    original_buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "data about a");
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_a.txt")
        )
    });
}
5284
// Exercises a real-filesystem worktree rescan after renames/deletes, checks
// that open buffers track their entries across moves, and that a remote
// replica of the worktree converges to the same state via streamed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real fs operations below require parking to be allowed.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: resolve a path to its worktree entry id.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, to replay on the remote.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The worktree reflects all of the above fs mutations.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids are stable across renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        // ...while the deleted file's buffer keeps its last-known path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
5452
5453#[cfg(target_os = "linux")]
5454#[gpui::test(retries = 5)]
5455async fn test_recreated_directory_receives_child_events(cx: &mut gpui::TestAppContext) {
5456 init_test(cx);
5457 cx.executor().allow_parking();
5458
5459 let dir = TempTree::new(json!({}));
5460 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
5461 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
5462
5463 tree.flush_fs_events(cx).await;
5464
5465 let repro_dir = dir.path().join("repro");
5466 std::fs::create_dir(&repro_dir).unwrap();
5467 tree.flush_fs_events(cx).await;
5468
5469 cx.update(|cx| {
5470 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5471 });
5472
5473 std::fs::remove_dir_all(&repro_dir).unwrap();
5474 tree.flush_fs_events(cx).await;
5475
5476 cx.update(|cx| {
5477 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_none());
5478 });
5479
5480 std::fs::create_dir(&repro_dir).unwrap();
5481 tree.flush_fs_events(cx).await;
5482
5483 cx.update(|cx| {
5484 assert!(tree.read(cx).entry_for_path(rel_path("repro")).is_some());
5485 });
5486
5487 std::fs::write(repro_dir.join("repro-marker"), "").unwrap();
5488 tree.flush_fs_events(cx).await;
5489
5490 cx.update(|cx| {
5491 assert!(
5492 tree.read(cx)
5493 .entry_for_path(rel_path("repro/repro-marker"))
5494 .is_some()
5495 );
5496 });
5497}
5498
// Renaming a directory must preserve entry ids for it and its children, and
// a buffer opened on a child file must remain valid (and clean) afterwards.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: resolve a path to its worktree entry id.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the parent directory `a` -> `b`.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids survive the rename for both the directory and its child.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
5552
5553#[gpui::test]
5554async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5555 init_test(cx);
5556
5557 let fs = FakeFs::new(cx.executor());
5558 fs.insert_tree(
5559 "/dir",
5560 json!({
5561 "a.txt": "a-contents",
5562 "b.txt": "b-contents",
5563 }),
5564 )
5565 .await;
5566
5567 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5568
5569 // Spawn multiple tasks to open paths, repeating some paths.
5570 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5571 (
5572 p.open_local_buffer("/dir/a.txt", cx),
5573 p.open_local_buffer("/dir/b.txt", cx),
5574 p.open_local_buffer("/dir/a.txt", cx),
5575 )
5576 });
5577
5578 let buffer_a_1 = buffer_a_1.await.unwrap();
5579 let buffer_a_2 = buffer_a_2.await.unwrap();
5580 let buffer_b = buffer_b.await.unwrap();
5581 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
5582 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
5583
5584 // There is only one buffer per path.
5585 let buffer_a_id = buffer_a_1.entity_id();
5586 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
5587
5588 // Open the same path again while it is still open.
5589 drop(buffer_a_1);
5590 let buffer_a_3 = project
5591 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
5592 .await
5593 .unwrap();
5594
5595 // There's still only one buffer per path.
5596 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
5597}
5598
5599#[gpui::test]
5600async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5601 init_test(cx);
5602
5603 let fs = FakeFs::new(cx.executor());
5604 fs.insert_tree(
5605 path!("/dir"),
5606 json!({
5607 "file1": "abc",
5608 "file2": "def",
5609 "file3": "ghi",
5610 }),
5611 )
5612 .await;
5613
5614 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5615
5616 let buffer1 = project
5617 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5618 .await
5619 .unwrap();
5620 let events = Arc::new(Mutex::new(Vec::new()));
5621
5622 // initially, the buffer isn't dirty.
5623 buffer1.update(cx, |buffer, cx| {
5624 cx.subscribe(&buffer1, {
5625 let events = events.clone();
5626 move |_, _, event, _| match event {
5627 BufferEvent::Operation { .. } => {}
5628 _ => events.lock().push(event.clone()),
5629 }
5630 })
5631 .detach();
5632
5633 assert!(!buffer.is_dirty());
5634 assert!(events.lock().is_empty());
5635
5636 buffer.edit([(1..2, "")], None, cx);
5637 });
5638
5639 // after the first edit, the buffer is dirty, and emits a dirtied event.
5640 buffer1.update(cx, |buffer, cx| {
5641 assert!(buffer.text() == "ac");
5642 assert!(buffer.is_dirty());
5643 assert_eq!(
5644 *events.lock(),
5645 &[
5646 language::BufferEvent::Edited { is_local: true },
5647 language::BufferEvent::DirtyChanged
5648 ]
5649 );
5650 events.lock().clear();
5651 buffer.did_save(
5652 buffer.version(),
5653 buffer.file().unwrap().disk_state().mtime(),
5654 cx,
5655 );
5656 });
5657
5658 // after saving, the buffer is not dirty, and emits a saved event.
5659 buffer1.update(cx, |buffer, cx| {
5660 assert!(!buffer.is_dirty());
5661 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
5662 events.lock().clear();
5663
5664 buffer.edit([(1..1, "B")], None, cx);
5665 buffer.edit([(2..2, "D")], None, cx);
5666 });
5667
5668 // after editing again, the buffer is dirty, and emits another dirty event.
5669 buffer1.update(cx, |buffer, cx| {
5670 assert!(buffer.text() == "aBDc");
5671 assert!(buffer.is_dirty());
5672 assert_eq!(
5673 *events.lock(),
5674 &[
5675 language::BufferEvent::Edited { is_local: true },
5676 language::BufferEvent::DirtyChanged,
5677 language::BufferEvent::Edited { is_local: true },
5678 ],
5679 );
5680 events.lock().clear();
5681
5682 // After restoring the buffer to its previously-saved state,
5683 // the buffer is not considered dirty anymore.
5684 buffer.edit([(1..3, "")], None, cx);
5685 assert!(buffer.text() == "ac");
5686 assert!(!buffer.is_dirty());
5687 });
5688
5689 assert_eq!(
5690 *events.lock(),
5691 &[
5692 language::BufferEvent::Edited { is_local: true },
5693 language::BufferEvent::DirtyChanged
5694 ]
5695 );
5696
5697 // When a file is deleted, it is not considered dirty.
5698 let events = Arc::new(Mutex::new(Vec::new()));
5699 let buffer2 = project
5700 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5701 .await
5702 .unwrap();
5703 buffer2.update(cx, |_, cx| {
5704 cx.subscribe(&buffer2, {
5705 let events = events.clone();
5706 move |_, _, event, _| match event {
5707 BufferEvent::Operation { .. } => {}
5708 _ => events.lock().push(event.clone()),
5709 }
5710 })
5711 .detach();
5712 });
5713
5714 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
5715 .await
5716 .unwrap();
5717 cx.executor().run_until_parked();
5718 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
5719 assert_eq!(
5720 mem::take(&mut *events.lock()),
5721 &[language::BufferEvent::FileHandleChanged]
5722 );
5723
5724 // Buffer becomes dirty when edited.
5725 buffer2.update(cx, |buffer, cx| {
5726 buffer.edit([(2..3, "")], None, cx);
5727 assert_eq!(buffer.is_dirty(), true);
5728 });
5729 assert_eq!(
5730 mem::take(&mut *events.lock()),
5731 &[
5732 language::BufferEvent::Edited { is_local: true },
5733 language::BufferEvent::DirtyChanged
5734 ]
5735 );
5736
5737 // Buffer becomes clean again when all of its content is removed, because
5738 // the file was deleted.
5739 buffer2.update(cx, |buffer, cx| {
5740 buffer.edit([(0..2, "")], None, cx);
5741 assert_eq!(buffer.is_empty(), true);
5742 assert_eq!(buffer.is_dirty(), false);
5743 });
5744 assert_eq!(
5745 *events.lock(),
5746 &[
5747 language::BufferEvent::Edited { is_local: true },
5748 language::BufferEvent::DirtyChanged
5749 ]
5750 );
5751
5752 // When a file is already dirty when deleted, we don't emit a Dirtied event.
5753 let events = Arc::new(Mutex::new(Vec::new()));
5754 let buffer3 = project
5755 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
5756 .await
5757 .unwrap();
5758 buffer3.update(cx, |_, cx| {
5759 cx.subscribe(&buffer3, {
5760 let events = events.clone();
5761 move |_, _, event, _| match event {
5762 BufferEvent::Operation { .. } => {}
5763 _ => events.lock().push(event.clone()),
5764 }
5765 })
5766 .detach();
5767 });
5768
5769 buffer3.update(cx, |buffer, cx| {
5770 buffer.edit([(0..0, "x")], None, cx);
5771 });
5772 events.lock().clear();
5773 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
5774 .await
5775 .unwrap();
5776 cx.executor().run_until_parked();
5777 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
5778 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
5779}
5780
#[gpui::test]
async fn test_dirty_buffer_reloads_after_undo(cx: &mut gpui::TestAppContext) {
    // A buffer whose on-disk file changed while the buffer was dirty should
    // reload from disk as soon as an undo returns it to a clean state,
    // instead of keeping the stale pre-conflict text.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file.txt": "version 1",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file.txt"), cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.text(), "version 1");
        assert!(!buffer.is_dirty());
    });

    // User makes an edit, making the buffer dirty.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "user edit: ")], None, cx);
    });

    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.is_dirty());
        assert_eq!(buffer.text(), "user edit: version 1");
    });

    // External tool writes new content while buffer is dirty.
    // file_updated() updates the File but suppresses ReloadNeeded.
    fs.save(
        path!("/dir/file.txt").as_ref(),
        &"version 2 from external tool".into(),
        Default::default(),
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();

    // The dirty buffer keeps its text but is now marked as conflicted.
    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.has_conflict());
        assert_eq!(buffer.text(), "user edit: version 1");
    });

    // User undoes their edit. Buffer becomes clean, but disk has different
    // content. did_edit() detects the dirty->clean transition and checks if
    // disk changed while dirty. Since mtime differs from saved_mtime, it
    // emits ReloadNeeded.
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });
    cx.executor().run_until_parked();

    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.text(),
            "version 2 from external tool",
            "buffer should reload from disk after undo makes it clean"
        );
        assert!(!buffer.is_dirty());
    });
}
5849
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // When a clean buffer's file changes on disk, the buffer reloads by
    // diffing old and new contents, so existing anchors land at the
    // corresponding new offsets. When a dirty buffer's file changes, the
    // buffer keeps its text and is marked as conflicted instead.
    init_test(cx);

    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create anchors at the marked offsets so we can verify they survive the
    // diff-based reload below.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Each anchor maps to the offset of the corresponding marker in the
        // new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
5932
5933#[gpui::test]
5934async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
5935 init_test(cx);
5936
5937 let fs = FakeFs::new(cx.executor());
5938 fs.insert_tree(
5939 path!("/dir"),
5940 json!({
5941 "file1": "a\nb\nc\n",
5942 "file2": "one\r\ntwo\r\nthree\r\n",
5943 }),
5944 )
5945 .await;
5946
5947 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5948 let buffer1 = project
5949 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5950 .await
5951 .unwrap();
5952 let buffer2 = project
5953 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5954 .await
5955 .unwrap();
5956
5957 buffer1.update(cx, |buffer, _| {
5958 assert_eq!(buffer.text(), "a\nb\nc\n");
5959 assert_eq!(buffer.line_ending(), LineEnding::Unix);
5960 });
5961 buffer2.update(cx, |buffer, _| {
5962 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
5963 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5964 });
5965
5966 // Change a file's line endings on disk from unix to windows. The buffer's
5967 // state updates correctly.
5968 fs.save(
5969 path!("/dir/file1").as_ref(),
5970 &"aaa\nb\nc\n".into(),
5971 LineEnding::Windows,
5972 )
5973 .await
5974 .unwrap();
5975 cx.executor().run_until_parked();
5976 buffer1.update(cx, |buffer, _| {
5977 assert_eq!(buffer.text(), "aaa\nb\nc\n");
5978 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5979 });
5980
5981 // Save a file with windows line endings. The file is written correctly.
5982 buffer2.update(cx, |buffer, cx| {
5983 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
5984 });
5985 project
5986 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
5987 .await
5988 .unwrap();
5989 assert_eq!(
5990 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
5991 "one\r\ntwo\r\nthree\r\nfour\r\n",
5992 );
5993}
5994
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Pushes five LSP diagnostics whose relatedInformation entries link
    // primaries to their hints, and verifies the buffer groups them:
    // group 1 = "error 1" (primary) + its single hint;
    // group 0 = "error 2" (primary) + its two hints.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload in which hints point back at their
    // primary ("original diagnostic") and primaries list their hints.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary "error 1" with one hint.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // The hint for "error 1", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary "error 2" with two hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint for "error 2", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint for "error 2", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, in range order, carry the expected group ids and
    // primary/hint roles.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 = "error 2" plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 = "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
6254
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Renaming a project entry must send workspace/willRenameFiles to a
    // language server that registered matching file-operation filters, and
    // follow up with a workspace/didRenameFiles notification afterwards.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the fake server registers: *.rs files and all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename of one.rs -> three.rs.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The workspace edit the fake server will return from willRenameFiles.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set once by the willRenameFiles handler so we can assert it ran.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles
    // with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
6391
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Symbol rename via LSP: prepare_rename returns the symbol's range, and
    // perform_rename applies the server's multi-file WorkspaceEdit, producing
    // a transaction that covers every edited buffer.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Prepare the rename at offset 7 (inside "ONE"); the fake server answers
    // with the symbol's range, columns 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server returns a WorkspaceEdit touching
    // both one.rs (the definition) and two.rs (the two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both buffers with the edits applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
6532
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Project-wide text search: results must reflect both on-disk content
    // and unsaved edits in open buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // NOTE(review): SearchQuery::text takes several positional boolean flags
    // (likely whole-word / case-sensitivity and similar) — their exact
    // meaning isn't visible here; confirm against the SearchQuery definition.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit an open buffer without saving so that four.rs now references
    // two::TWO twice in memory.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same query now also matches the unsaved in-memory contents of
    // four.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
6609
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Project search honoring the `files_to_include` PathMatcher (5th argument
    // to `SearchQuery::text`): only files matched by at least one inclusion
    // pattern may contribute results.
    //
    // NOTE(review) on the positional arguments of `SearchQuery::text`:
    // the 3rd bool is case-sensitivity (see `test_search_with_unicode` below)
    // and the 4th is include-ignored (see `test_search_in_gitignored_dirs`);
    // the 2nd bool is presumably whole-word and the trailing `None` an optional
    // buffer/limit filter — confirm against the `SearchQuery` definition.
    init_test(cx);

    let search_query = "file";

    // Two Rust and two TypeScript files; each contains the word "file" once.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion pattern matching no file → empty result set.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Only `*.rs` included → just the two Rust files, with the byte range of
    // "file" within each file's contents.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A matching pattern plus a non-matching one: the dead pattern must not
    // suppress the live one.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Every real extension included (plus a dead pattern) → all four files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
6733
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Project search honoring the `files_to_exclude` PathMatcher (6th argument
    // to `SearchQuery::text`): files matched by any exclusion pattern are
    // dropped from the results.
    init_test(cx);

    let search_query = "file";

    // Two Rust and two TypeScript files; each contains the word "file" once.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Exclusion pattern matching no file → nothing is filtered out.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // `*.rs` excluded → only the TypeScript files remain.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A matching exclusion plus a dead one: the dead pattern changes nothing.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Every real extension excluded → empty result set.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
6857
6858#[gpui::test]
6859async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
6860 init_test(cx);
6861
6862 let search_query = "file";
6863
6864 let fs = FakeFs::new(cx.executor());
6865 fs.insert_tree(
6866 path!("/dir"),
6867 json!({
6868 "one.rs": r#"// Rust file one"#,
6869 "one.ts": r#"// TypeScript file one"#,
6870 "two.rs": r#"// Rust file two"#,
6871 "two.ts": r#"// TypeScript file two"#,
6872 }),
6873 )
6874 .await;
6875
6876 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6877 let path_style = PathStyle::local();
6878 let _buffer = project.update(cx, |project, cx| {
6879 project.create_local_buffer("file", None, false, cx)
6880 });
6881
6882 assert_eq!(
6883 search(
6884 &project,
6885 SearchQuery::text(
6886 search_query,
6887 false,
6888 true,
6889 false,
6890 Default::default(),
6891 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
6892 false,
6893 None,
6894 )
6895 .unwrap(),
6896 cx
6897 )
6898 .await
6899 .unwrap(),
6900 HashMap::from_iter([
6901 (path!("dir/one.rs").to_string(), vec![8..12]),
6902 (path!("dir/one.ts").to_string(), vec![14..18]),
6903 (path!("dir/two.rs").to_string(), vec![8..12]),
6904 (path!("dir/two.ts").to_string(), vec![14..18]),
6905 ]),
6906 "If no exclusions match, all files should be returned"
6907 );
6908
6909 assert_eq!(
6910 search(
6911 &project,
6912 SearchQuery::text(
6913 search_query,
6914 false,
6915 true,
6916 false,
6917 Default::default(),
6918 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
6919 false,
6920 None,
6921 )
6922 .unwrap(),
6923 cx
6924 )
6925 .await
6926 .unwrap(),
6927 HashMap::from_iter([
6928 (path!("dir/one.ts").to_string(), vec![14..18]),
6929 (path!("dir/two.ts").to_string(), vec![14..18]),
6930 ]),
6931 "Rust exclusion search should give only TypeScript files"
6932 );
6933
6934 assert_eq!(
6935 search(
6936 &project,
6937 SearchQuery::text(
6938 search_query,
6939 false,
6940 true,
6941 false,
6942 Default::default(),
6943 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6944 false,
6945 None,
6946 )
6947 .unwrap(),
6948 cx
6949 )
6950 .await
6951 .unwrap(),
6952 HashMap::from_iter([
6953 (path!("dir/one.rs").to_string(), vec![8..12]),
6954 (path!("dir/two.rs").to_string(), vec![8..12]),
6955 ]),
6956 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6957 );
6958
6959 assert!(
6960 search(
6961 &project,
6962 SearchQuery::text(
6963 search_query,
6964 false,
6965 true,
6966 false,
6967 Default::default(),
6968 PathMatcher::new(
6969 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6970 PathStyle::local(),
6971 )
6972 .unwrap(),
6973 false,
6974 None,
6975 )
6976 .unwrap(),
6977 cx
6978 )
6979 .await
6980 .unwrap()
6981 .is_empty(),
6982 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6983 );
6984}
6985
6986#[gpui::test]
6987async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6988 init_test(cx);
6989
6990 let search_query = "file";
6991
6992 let fs = FakeFs::new(cx.executor());
6993 fs.insert_tree(
6994 path!("/dir"),
6995 json!({
6996 "one.rs": r#"// Rust file one"#,
6997 "one.ts": r#"// TypeScript file one"#,
6998 "two.rs": r#"// Rust file two"#,
6999 "two.ts": r#"// TypeScript file two"#,
7000 }),
7001 )
7002 .await;
7003 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7004 assert!(
7005 search(
7006 &project,
7007 SearchQuery::text(
7008 search_query,
7009 false,
7010 true,
7011 false,
7012 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7013 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
7014 false,
7015 None,
7016 )
7017 .unwrap(),
7018 cx
7019 )
7020 .await
7021 .unwrap()
7022 .is_empty(),
7023 "If both no exclusions and inclusions match, exclusions should win and return nothing"
7024 );
7025
7026 assert!(
7027 search(
7028 &project,
7029 SearchQuery::text(
7030 search_query,
7031 false,
7032 true,
7033 false,
7034 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7035 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
7036 false,
7037 None,
7038 )
7039 .unwrap(),
7040 cx
7041 )
7042 .await
7043 .unwrap()
7044 .is_empty(),
7045 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
7046 );
7047
7048 assert!(
7049 search(
7050 &project,
7051 SearchQuery::text(
7052 search_query,
7053 false,
7054 true,
7055 false,
7056 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7057 .unwrap(),
7058 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7059 .unwrap(),
7060 false,
7061 None,
7062 )
7063 .unwrap(),
7064 cx
7065 )
7066 .await
7067 .unwrap()
7068 .is_empty(),
7069 "Non-matching inclusions and exclusions should not change that."
7070 );
7071
7072 assert_eq!(
7073 search(
7074 &project,
7075 SearchQuery::text(
7076 search_query,
7077 false,
7078 true,
7079 false,
7080 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
7081 .unwrap(),
7082 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
7083 .unwrap(),
7084 false,
7085 None,
7086 )
7087 .unwrap(),
7088 cx
7089 )
7090 .await
7091 .unwrap(),
7092 HashMap::from_iter([
7093 (path!("dir/one.ts").to_string(), vec![14..18]),
7094 (path!("dir/two.ts").to_string(), vec![14..18]),
7095 ]),
7096 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
7097 );
7098}
7099
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Inclusion filters in a project with two worktrees. Each worktree holds
    // an identical pair of files containing "NEEDLE".
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // A worktree-prefixed pattern plus the `true` 7th argument restricts
    // results to that single worktree.
    // NOTE(review): the `true` flag appears to make the matchers run against
    // paths that include the worktree root name (the plain `*.ts` query below
    // passes `false` and matches in both worktrees) — confirm against the
    // `SearchQuery::text` signature.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same query aimed at the other worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // A plain extension pattern (no worktree prefix, 7th arg `false`)
    // matches across all worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
7198
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Search behavior for gitignored directories, toggled by the 4th
    // (include-ignored) argument of `SearchQuery::text`.
    init_test(cx);

    // A repo whose .gitignore hides `target/` and `node_modules/`; the query
    // "key" appears once in every file of the tree.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Default search (include-ignored = false) only sees the one file that
    // is not gitignored.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project so previous search state can't leak into this case.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // include-ignored = true → every file in the tree is searched, including
    // those under `target/` and `node_modules/`.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // include-ignored combined with inclusion (an ignored subdirectory) and
    // exclusion (`*.ts`) filters: only the single matching JSON file remains.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
7323
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    // Text search over non-ASCII content. All expected ranges are byte
    // offsets: each Cyrillic character is 2 bytes in UTF-8, so the 6-letter
    // word "привет" spans 12 bytes (e.g. 3..15 after a 3-byte "// " prefix).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive (3rd arg = true) text search stays a plain Text query
    // and only matches the lowercase occurrences.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive search of a non-ASCII needle is compiled down to a
    // Regex query (asserted below) and matches both cases.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing '.' in the needle is matched literally (13-byte range),
    // so only two.rs — whose text ends in "ПРИВЕТ." — matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
7406
7407#[gpui::test]
7408async fn test_create_entry(cx: &mut gpui::TestAppContext) {
7409 init_test(cx);
7410
7411 let fs = FakeFs::new(cx.executor());
7412 fs.insert_tree(
7413 "/one/two",
7414 json!({
7415 "three": {
7416 "a.txt": "",
7417 "four": {}
7418 },
7419 "c.rs": ""
7420 }),
7421 )
7422 .await;
7423
7424 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
7425 project
7426 .update(cx, |project, cx| {
7427 let id = project.worktrees(cx).next().unwrap().read(cx).id();
7428 project.create_entry((id, rel_path("b..")), true, cx)
7429 })
7430 .await
7431 .unwrap()
7432 .into_included()
7433 .unwrap();
7434
7435 assert_eq!(
7436 fs.paths(true),
7437 vec![
7438 PathBuf::from(path!("/")),
7439 PathBuf::from(path!("/one")),
7440 PathBuf::from(path!("/one/two")),
7441 PathBuf::from(path!("/one/two/c.rs")),
7442 PathBuf::from(path!("/one/two/three")),
7443 PathBuf::from(path!("/one/two/three/a.txt")),
7444 PathBuf::from(path!("/one/two/three/b..")),
7445 PathBuf::from(path!("/one/two/three/four")),
7446 ]
7447 );
7448}
7449
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Hover with several language servers on one buffer: servers advertising
    // hover capability are all queried; a server without the capability must
    // never receive the request; servers answering `None` contribute nothing.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Three servers with hover capability (one of which will answer `None`)
    // plus one that does not advertise hover at all.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers; park until they're up.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each started server, keyed by server name,
    // keeping the handler streams so we can later await that each request
    // was actually delivered.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two answer with a hover labelled by their own name.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Has hover capability but returns no hover content.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Must never be asked: it advertised no hover capability.
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Kick off the hover, then await delivery of a request to each of the
    // three capable servers before collecting the aggregated result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that produced content appear in the result;
    // ESLintServer's `None` is dropped.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
7604
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // A hover whose parts are all empty/whitespace strings must be treated
    // as no hover at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server responds with three hover parts that are all blank: an
    // empty string, whitespace only, and newlines only.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    // Start the hover, confirm the request reached the server, then check
    // the blank parts were all filtered out of the final result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
7678
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // Requesting code actions with an explicit kind filter: the fake server
    // ignores the filter and returns two actions of different kinds, so the
    // single action surviving in the result shows the kind filter is applied
    // on the client side.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server always answers with one organize-imports action and one
    // fix-all action, regardless of the request's filter.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Ask only for SOURCE_ORGANIZE_IMPORTS over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the requested kind survives.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
7757
#[gpui::test]
async fn test_code_actions_without_requested_kinds_do_not_send_only_filter(
    cx: &mut gpui::TestAppContext,
) {
    // When `code_actions` is called with `None` for the kinds, the outgoing
    // LSP request must leave `context.only` unset — even though the server
    // advertises a specific set of supported code-action kinds.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The server declares support for two specific kinds; that alone must
    // not cause the client to send an `only` filter.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        code_action_kinds: Some(vec![
                            CodeActionKind::SOURCE_ORGANIZE_IMPORTS,
                            "source.doc".into(),
                        ]),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The handler asserts on the request itself: no `context.only` filter.
    // It then answers with one action of a kind the server never declared.
    let mut request_handled = fake_server.set_request_handler::<
        lsp::request::CodeActionRequest,
        _,
        _,
    >(move |params, _| async move {
        assert_eq!(
            params.context.only, None,
            "Code action requests without explicit kind filters should not send `context.only`"
        );
        Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
            lsp::CodeAction {
                title: "Add test".to_string(),
                kind: Some("source.addTest".into()),
                ..lsp::CodeAction::default()
            },
        )]))
    });

    // No kind filter requested by the client.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // With no filter, the undeclared-kind action passes through unfiltered.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some("source.addTest".into())
    );
}
7843
7844#[gpui::test]
7845async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
7846 init_test(cx);
7847
7848 let fs = FakeFs::new(cx.executor());
7849 fs.insert_tree(
7850 path!("/dir"),
7851 json!({
7852 "a.tsx": "a",
7853 }),
7854 )
7855 .await;
7856
7857 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
7858
7859 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
7860 language_registry.add(tsx_lang());
7861 let language_server_names = [
7862 "TypeScriptServer",
7863 "TailwindServer",
7864 "ESLintServer",
7865 "NoActionsCapabilitiesServer",
7866 ];
7867
7868 let mut language_server_rxs = [
7869 language_registry.register_fake_lsp(
7870 "tsx",
7871 FakeLspAdapter {
7872 name: language_server_names[0],
7873 capabilities: lsp::ServerCapabilities {
7874 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7875 ..lsp::ServerCapabilities::default()
7876 },
7877 ..FakeLspAdapter::default()
7878 },
7879 ),
7880 language_registry.register_fake_lsp(
7881 "tsx",
7882 FakeLspAdapter {
7883 name: language_server_names[1],
7884 capabilities: lsp::ServerCapabilities {
7885 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7886 ..lsp::ServerCapabilities::default()
7887 },
7888 ..FakeLspAdapter::default()
7889 },
7890 ),
7891 language_registry.register_fake_lsp(
7892 "tsx",
7893 FakeLspAdapter {
7894 name: language_server_names[2],
7895 capabilities: lsp::ServerCapabilities {
7896 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
7897 ..lsp::ServerCapabilities::default()
7898 },
7899 ..FakeLspAdapter::default()
7900 },
7901 ),
7902 language_registry.register_fake_lsp(
7903 "tsx",
7904 FakeLspAdapter {
7905 name: language_server_names[3],
7906 capabilities: lsp::ServerCapabilities {
7907 code_action_provider: None,
7908 ..lsp::ServerCapabilities::default()
7909 },
7910 ..FakeLspAdapter::default()
7911 },
7912 ),
7913 ];
7914
7915 let (buffer, _handle) = project
7916 .update(cx, |p, cx| {
7917 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
7918 })
7919 .await
7920 .unwrap();
7921 cx.executor().run_until_parked();
7922
7923 let mut servers_with_actions_requests = HashMap::default();
7924 for i in 0..language_server_names.len() {
7925 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
7926 panic!(
7927 "Failed to get language server #{i} with name {}",
7928 &language_server_names[i]
7929 )
7930 });
7931 let new_server_name = new_server.server.name();
7932
7933 assert!(
7934 !servers_with_actions_requests.contains_key(&new_server_name),
7935 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
7936 );
7937 match new_server_name.0.as_ref() {
7938 "TailwindServer" | "TypeScriptServer" => {
7939 servers_with_actions_requests.insert(
7940 new_server_name.clone(),
7941 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7942 move |_, _| {
7943 let name = new_server_name.clone();
7944 async move {
7945 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
7946 lsp::CodeAction {
7947 title: format!("{name} code action"),
7948 ..lsp::CodeAction::default()
7949 },
7950 )]))
7951 }
7952 },
7953 ),
7954 );
7955 }
7956 "ESLintServer" => {
7957 servers_with_actions_requests.insert(
7958 new_server_name,
7959 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
7960 |_, _| async move { Ok(None) },
7961 ),
7962 );
7963 }
7964 "NoActionsCapabilitiesServer" => {
7965 let _never_handled = new_server
7966 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7967 panic!(
7968 "Should not call for code actions server with no corresponding capabilities"
7969 )
7970 });
7971 }
7972 unexpected => panic!("Unexpected server name: {unexpected}"),
7973 }
7974 }
7975
7976 let code_actions_task = project.update(cx, |project, cx| {
7977 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
7978 });
7979
7980 // cx.run_until_parked();
7981 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
7982 |mut code_actions_request| async move {
7983 code_actions_request
7984 .next()
7985 .await
7986 .expect("All code actions requests should have been triggered")
7987 },
7988 ))
7989 .await;
7990 assert_eq!(
7991 vec!["TailwindServer code action", "TypeScriptServer code action"],
7992 code_actions_task
7993 .await
7994 .unwrap()
7995 .unwrap()
7996 .into_iter()
7997 .map(|code_action| code_action.lsp_action.title().to_owned())
7998 .sorted()
7999 .collect::<Vec<_>>(),
8000 "Should receive code actions responses from all related servers with hover capabilities"
8001 );
8002}
8003
8004#[gpui::test]
8005async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
8006 init_test(cx);
8007
8008 let fs = FakeFs::new(cx.executor());
8009 fs.insert_tree(
8010 "/dir",
8011 json!({
8012 "a.rs": "let a = 1;",
8013 "b.rs": "let b = 2;",
8014 "c.rs": "let c = 2;",
8015 }),
8016 )
8017 .await;
8018
8019 let project = Project::test(
8020 fs,
8021 [
8022 "/dir/a.rs".as_ref(),
8023 "/dir/b.rs".as_ref(),
8024 "/dir/c.rs".as_ref(),
8025 ],
8026 cx,
8027 )
8028 .await;
8029
8030 // check the initial state and get the worktrees
8031 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
8032 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8033 assert_eq!(worktrees.len(), 3);
8034
8035 let worktree_a = worktrees[0].read(cx);
8036 let worktree_b = worktrees[1].read(cx);
8037 let worktree_c = worktrees[2].read(cx);
8038
8039 // check they start in the right order
8040 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
8041 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
8042 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
8043
8044 (
8045 worktrees[0].clone(),
8046 worktrees[1].clone(),
8047 worktrees[2].clone(),
8048 )
8049 });
8050
8051 // move first worktree to after the second
8052 // [a, b, c] -> [b, a, c]
8053 project
8054 .update(cx, |project, cx| {
8055 let first = worktree_a.read(cx);
8056 let second = worktree_b.read(cx);
8057 project.move_worktree(first.id(), second.id(), cx)
8058 })
8059 .expect("moving first after second");
8060
8061 // check the state after moving
8062 project.update(cx, |project, cx| {
8063 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8064 assert_eq!(worktrees.len(), 3);
8065
8066 let first = worktrees[0].read(cx);
8067 let second = worktrees[1].read(cx);
8068 let third = worktrees[2].read(cx);
8069
8070 // check they are now in the right order
8071 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8072 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
8073 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8074 });
8075
8076 // move the second worktree to before the first
8077 // [b, a, c] -> [a, b, c]
8078 project
8079 .update(cx, |project, cx| {
8080 let second = worktree_a.read(cx);
8081 let first = worktree_b.read(cx);
8082 project.move_worktree(first.id(), second.id(), cx)
8083 })
8084 .expect("moving second before first");
8085
8086 // check the state after moving
8087 project.update(cx, |project, cx| {
8088 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8089 assert_eq!(worktrees.len(), 3);
8090
8091 let first = worktrees[0].read(cx);
8092 let second = worktrees[1].read(cx);
8093 let third = worktrees[2].read(cx);
8094
8095 // check they are now in the right order
8096 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8097 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8098 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8099 });
8100
8101 // move the second worktree to after the third
8102 // [a, b, c] -> [a, c, b]
8103 project
8104 .update(cx, |project, cx| {
8105 let second = worktree_b.read(cx);
8106 let third = worktree_c.read(cx);
8107 project.move_worktree(second.id(), third.id(), cx)
8108 })
8109 .expect("moving second after third");
8110
8111 // check the state after moving
8112 project.update(cx, |project, cx| {
8113 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8114 assert_eq!(worktrees.len(), 3);
8115
8116 let first = worktrees[0].read(cx);
8117 let second = worktrees[1].read(cx);
8118 let third = worktrees[2].read(cx);
8119
8120 // check they are now in the right order
8121 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8122 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8123 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
8124 });
8125
8126 // move the third worktree to before the second
8127 // [a, c, b] -> [a, b, c]
8128 project
8129 .update(cx, |project, cx| {
8130 let third = worktree_c.read(cx);
8131 let second = worktree_b.read(cx);
8132 project.move_worktree(third.id(), second.id(), cx)
8133 })
8134 .expect("moving third before second");
8135
8136 // check the state after moving
8137 project.update(cx, |project, cx| {
8138 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8139 assert_eq!(worktrees.len(), 3);
8140
8141 let first = worktrees[0].read(cx);
8142 let second = worktrees[1].read(cx);
8143 let third = worktrees[2].read(cx);
8144
8145 // check they are now in the right order
8146 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8147 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8148 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8149 });
8150
8151 // move the first worktree to after the third
8152 // [a, b, c] -> [b, c, a]
8153 project
8154 .update(cx, |project, cx| {
8155 let first = worktree_a.read(cx);
8156 let third = worktree_c.read(cx);
8157 project.move_worktree(first.id(), third.id(), cx)
8158 })
8159 .expect("moving first after third");
8160
8161 // check the state after moving
8162 project.update(cx, |project, cx| {
8163 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8164 assert_eq!(worktrees.len(), 3);
8165
8166 let first = worktrees[0].read(cx);
8167 let second = worktrees[1].read(cx);
8168 let third = worktrees[2].read(cx);
8169
8170 // check they are now in the right order
8171 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
8172 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
8173 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
8174 });
8175
8176 // move the third worktree to before the first
8177 // [b, c, a] -> [a, b, c]
8178 project
8179 .update(cx, |project, cx| {
8180 let third = worktree_a.read(cx);
8181 let first = worktree_b.read(cx);
8182 project.move_worktree(third.id(), first.id(), cx)
8183 })
8184 .expect("moving third before first");
8185
8186 // check the state after moving
8187 project.update(cx, |project, cx| {
8188 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
8189 assert_eq!(worktrees.len(), 3);
8190
8191 let first = worktrees[0].read(cx);
8192 let second = worktrees[1].read(cx);
8193 let third = worktrees[2].read(cx);
8194
8195 // check they are now in the right order
8196 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
8197 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
8198 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
8199 });
8200}
8201
8202#[gpui::test]
8203async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
8204 init_test(cx);
8205
8206 let staged_contents = r#"
8207 fn main() {
8208 println!("hello world");
8209 }
8210 "#
8211 .unindent();
8212 let file_contents = r#"
8213 // print goodbye
8214 fn main() {
8215 println!("goodbye world");
8216 }
8217 "#
8218 .unindent();
8219
8220 let fs = FakeFs::new(cx.background_executor.clone());
8221 fs.insert_tree(
8222 "/dir",
8223 json!({
8224 ".git": {},
8225 "src": {
8226 "main.rs": file_contents,
8227 }
8228 }),
8229 )
8230 .await;
8231
8232 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
8233
8234 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8235
8236 let buffer = project
8237 .update(cx, |project, cx| {
8238 project.open_local_buffer("/dir/src/main.rs", cx)
8239 })
8240 .await
8241 .unwrap();
8242 let unstaged_diff = project
8243 .update(cx, |project, cx| {
8244 project.open_unstaged_diff(buffer.clone(), cx)
8245 })
8246 .await
8247 .unwrap();
8248
8249 cx.run_until_parked();
8250 unstaged_diff.update(cx, |unstaged_diff, cx| {
8251 let snapshot = buffer.read(cx).snapshot();
8252 assert_hunks(
8253 unstaged_diff.snapshot(cx).hunks(&snapshot),
8254 &snapshot,
8255 &unstaged_diff.base_text_string(cx).unwrap(),
8256 &[
8257 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
8258 (
8259 2..3,
8260 " println!(\"hello world\");\n",
8261 " println!(\"goodbye world\");\n",
8262 DiffHunkStatus::modified_none(),
8263 ),
8264 ],
8265 );
8266 });
8267
8268 let staged_contents = r#"
8269 // print goodbye
8270 fn main() {
8271 }
8272 "#
8273 .unindent();
8274
8275 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
8276
8277 cx.run_until_parked();
8278 unstaged_diff.update(cx, |unstaged_diff, cx| {
8279 let snapshot = buffer.read(cx).snapshot();
8280 assert_hunks(
8281 unstaged_diff
8282 .snapshot(cx)
8283 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
8284 &snapshot,
8285 &unstaged_diff.base_text(cx).text(),
8286 &[(
8287 2..3,
8288 "",
8289 " println!(\"goodbye world\");\n",
8290 DiffHunkStatus::added_none(),
8291 )],
8292 );
8293 });
8294}
8295
/// Verifies the uncommitted diff (working copy vs. HEAD) for buffers: hunks
/// carry secondary (index) statuses, the diff updates when HEAD changes, and
/// a deleted file produces a single deletion hunk whose secondary status
/// tracks whether the deletion has been staged.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: HEAD, index, and working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index also contain a file that is absent from the working copy.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment addition is unstaged (absent from the index), so it has a
    // secondary hunk; the println change is already staged, so it has none.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is not yet staged (file still present in the index).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk remains, but its secondary (unstaged) hunk is gone.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
8479
/// Verifies the hunk staging state machine: staging a hunk optimistically
/// marks it `SecondaryHunkRemovalPending`, a successful index write settles
/// it to `NoSecondaryHunk`, a failed write rolls it back, and the diff emits
/// the expected `HunksStagedOrUnstaged` / `DiffChanged` events throughout.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so every buffer change is unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    // Pending: the index write has been issued but not confirmed.
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged(DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
        extended_range: _,
    }) = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8829
/// Regression test: staging further hunks while file-system events from
/// earlier index writes are still buffered must not clobber the pending
/// staging state — once all events are flushed, every hunk ends up staged.
/// The fixed seeds reproduce interleavings that previously failed.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so all three hunks are unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
9023
/// Randomized test: repeatedly stages and unstages random hunks with random
/// executor delays, mirroring each operation's expected secondary status in
/// a local copy of the hunks, then checks that once everything settles the
/// diff's hunk statuses match the mirror.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index are identical; the buffer modifies every 5th line,
    // yielding one hunk per modified line (lines 0, 5, 10, 15, 20, 25).
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the expected-state mirror updated alongside each operation.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the chosen hunk: stage if it has a secondary (unstaged)
        // hunk, otherwise unstage it, recording the expected pending status.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Randomly interleave executor progress between operations.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once settled, each pending status resolves to its final state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
9143
9144#[gpui::test]
9145async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
9146 init_test(cx);
9147
9148 let committed_contents = r#"
9149 fn main() {
9150 println!("hello from HEAD");
9151 }
9152 "#
9153 .unindent();
9154 let file_contents = r#"
9155 fn main() {
9156 println!("hello from the working copy");
9157 }
9158 "#
9159 .unindent();
9160
9161 let fs = FakeFs::new(cx.background_executor.clone());
9162 fs.insert_tree(
9163 "/dir",
9164 json!({
9165 ".git": {},
9166 "src": {
9167 "main.rs": file_contents,
9168 }
9169 }),
9170 )
9171 .await;
9172
9173 fs.set_head_for_repo(
9174 Path::new("/dir/.git"),
9175 &[("src/main.rs", committed_contents.clone())],
9176 "deadbeef",
9177 );
9178 fs.set_index_for_repo(
9179 Path::new("/dir/.git"),
9180 &[("src/main.rs", committed_contents.clone())],
9181 );
9182
9183 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
9184
9185 let buffer = project
9186 .update(cx, |project, cx| {
9187 project.open_local_buffer("/dir/src/main.rs", cx)
9188 })
9189 .await
9190 .unwrap();
9191 let uncommitted_diff = project
9192 .update(cx, |project, cx| {
9193 project.open_uncommitted_diff(buffer.clone(), cx)
9194 })
9195 .await
9196 .unwrap();
9197
9198 cx.run_until_parked();
9199 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
9200 let snapshot = buffer.read(cx).snapshot();
9201 assert_hunks(
9202 uncommitted_diff.snapshot(cx).hunks(&snapshot),
9203 &snapshot,
9204 &uncommitted_diff.base_text_string(cx).unwrap(),
9205 &[(
9206 1..2,
9207 " println!(\"hello from HEAD\");\n",
9208 " println!(\"hello from the working copy\");\n",
9209 DiffHunkStatus {
9210 kind: DiffHunkStatusKind::Modified,
9211 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
9212 },
9213 )],
9214 );
9215 });
9216}
9217
// TODO: Should we test this on Windows also?
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Uses a real git repository on disk (TempTree + RealFs + `git` subprocesses).
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    // Commit the file with the executable bit set (index mode 100755).
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    // Modify the working copy so there is one hunk to stage.
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk of the uncommitted diff.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // Staging content must not rewrite the index entry's mode: no mode-change
    // line should appear in the staged diff.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Cross-check directly against the index listing: the entry must still be
    // recorded as executable (100755).
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
9302
9303#[gpui::test]
9304async fn test_repository_and_path_for_project_path(
9305 background_executor: BackgroundExecutor,
9306 cx: &mut gpui::TestAppContext,
9307) {
9308 init_test(cx);
9309 let fs = FakeFs::new(background_executor);
9310 fs.insert_tree(
9311 path!("/root"),
9312 json!({
9313 "c.txt": "",
9314 "dir1": {
9315 ".git": {},
9316 "deps": {
9317 "dep1": {
9318 ".git": {},
9319 "src": {
9320 "a.txt": ""
9321 }
9322 }
9323 },
9324 "src": {
9325 "b.txt": ""
9326 }
9327 },
9328 }),
9329 )
9330 .await;
9331
9332 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9333 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9334 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9335 project
9336 .update(cx, |project, cx| project.git_scans_complete(cx))
9337 .await;
9338 cx.run_until_parked();
9339
9340 project.read_with(cx, |project, cx| {
9341 let git_store = project.git_store().read(cx);
9342 let pairs = [
9343 ("c.txt", None),
9344 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
9345 (
9346 "dir1/deps/dep1/src/a.txt",
9347 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
9348 ),
9349 ];
9350 let expected = pairs
9351 .iter()
9352 .map(|(path, result)| {
9353 (
9354 path,
9355 result.map(|(repo, repo_path)| {
9356 (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
9357 }),
9358 )
9359 })
9360 .collect::<Vec<_>>();
9361 let actual = pairs
9362 .iter()
9363 .map(|(path, _)| {
9364 let project_path = (tree_id, rel_path(path)).into();
9365 let result = maybe!({
9366 let (repo, repo_path) =
9367 git_store.repository_and_path_for_project_path(&project_path, cx)?;
9368 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
9369 });
9370 (path, result)
9371 })
9372 .collect::<Vec<_>>();
9373 pretty_assertions::assert_eq!(expected, actual);
9374 });
9375
9376 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
9377 .await
9378 .unwrap();
9379 cx.run_until_parked();
9380
9381 project.read_with(cx, |project, cx| {
9382 let git_store = project.git_store().read(cx);
9383 assert_eq!(
9384 git_store.repository_and_path_for_project_path(
9385 &(tree_id, rel_path("dir1/src/b.txt")).into(),
9386 cx
9387 ),
9388 None
9389 );
9390 });
9391}
9392
9393#[gpui::test]
9394async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
9395 init_test(cx);
9396 let fs = FakeFs::new(cx.background_executor.clone());
9397 let home = paths::home_dir();
9398 fs.insert_tree(
9399 home,
9400 json!({
9401 ".git": {},
9402 "project": {
9403 "a.txt": "A"
9404 },
9405 }),
9406 )
9407 .await;
9408
9409 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
9410 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9411 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9412
9413 project
9414 .update(cx, |project, cx| project.git_scans_complete(cx))
9415 .await;
9416 tree.flush_fs_events(cx).await;
9417
9418 project.read_with(cx, |project, cx| {
9419 let containing = project
9420 .git_store()
9421 .read(cx)
9422 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
9423 assert!(containing.is_none());
9424 });
9425
9426 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
9427 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9428 let tree_id = tree.read_with(cx, |tree, _| tree.id());
9429 project
9430 .update(cx, |project, cx| project.git_scans_complete(cx))
9431 .await;
9432 tree.flush_fs_events(cx).await;
9433
9434 project.read_with(cx, |project, cx| {
9435 let containing = project
9436 .git_store()
9437 .read(cx)
9438 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
9439 assert_eq!(
9440 containing
9441 .unwrap()
9442 .0
9443 .read(cx)
9444 .work_directory_abs_path
9445 .as_ref(),
9446 home,
9447 );
9448 });
9449}
9450
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real git repository on disk (TempTree + RealFs).
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // b.txt is deliberately never added, so it stays untracked.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; its status should appear.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 1,
                        deleted: 1,
                    }),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                    diff_stat: Some(DiffStat {
                        added: 0,
                        deleted: 1,
                    }),
                },
            ]
        );
    });

    // Commit everything tracked, clearing all tracked statuses.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Now delete one tracked and one untracked file from the working copy.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
                diff_stat: Some(DiffStat {
                    added: 0,
                    deleted: 1,
                }),
            }]
        );
    });
}
9606
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real git repository on disk.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Select the outer repository (not the nested one inside `sub`).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
                diff_stat: None,
            }]
        )
    });
}
9671
9672#[track_caller]
9673/// We merge lhs into rhs.
9674fn merge_pending_ops_snapshots(
9675 source: Vec<pending_op::PendingOps>,
9676 mut target: Vec<pending_op::PendingOps>,
9677) -> Vec<pending_op::PendingOps> {
9678 for s_ops in source {
9679 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
9680 if ops.repo_path == s_ops.repo_path {
9681 Some(idx)
9682 } else {
9683 None
9684 }
9685 }) {
9686 let t_ops = &mut target[idx];
9687 for s_op in s_ops.ops {
9688 if let Some(op_idx) = t_ops
9689 .ops
9690 .iter()
9691 .zip(0..)
9692 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
9693 {
9694 let t_op = &mut t_ops.ops[op_idx];
9695 match (s_op.job_status, t_op.job_status) {
9696 (pending_op::JobStatus::Running, _) => {}
9697 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
9698 (s_st, t_st) if s_st == t_st => {}
9699 _ => unreachable!(),
9700 }
9701 } else {
9702 t_ops.ops.push(s_op);
9703 }
9704 }
9705 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
9706 } else {
9707 target.push(s_ops);
9708 }
9709 }
9710 target
9711}
9712
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulates every pending-ops snapshot the git store emits, so the full
    // op history (not just the final state) can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                // Fold this event's snapshot into the running accumulation.
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Each stage/unstage request below is expected to be assigned the next id.
    let mut id = 1u16;

    // Issues a stage or unstage request for `path`, asserting the matching
    // pending op is `Running` while in flight and `Finished` once awaited.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            // The op is registered synchronously, before the task completes.
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate staging and unstaging, ending in the staged state.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The accumulated history records all five ops, in order, all finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The final cached status shows the file staged (added to the index).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
9877
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulates every pending-ops snapshot the git store emits.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Fire the first stage request without awaiting its task.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Issue a second stage request for the same path and wait for it.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // The first op was superseded by the second and records as `Skipped`;
    // only the second op actually `Finished`.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file still ends up staged exactly once.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
                diff_stat: Some(DiffStat {
                    added: 1,
                    deleted: 0,
                }),
            }]
        );
    });
}
9987
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulates every pending-ops snapshot the git store emits.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage one entry explicitly, then stage everything, then unstage all.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // Per the recorded history, each path ends up with exactly one finished
    // stage op followed by one finished unstage op (op ids are per path).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                    diff_stat: None,
                },
            ]
        );
    });
}
10118
10119#[gpui::test]
10120async fn test_repository_subfolder_git_status(
10121 executor: gpui::BackgroundExecutor,
10122 cx: &mut gpui::TestAppContext,
10123) {
10124 init_test(cx);
10125
10126 let fs = FakeFs::new(executor);
10127 fs.insert_tree(
10128 path!("/root"),
10129 json!({
10130 "my-repo": {
10131 ".git": {},
10132 "a.txt": "a",
10133 "sub-folder-1": {
10134 "sub-folder-2": {
10135 "c.txt": "cc",
10136 "d": {
10137 "e.txt": "eee"
10138 }
10139 },
10140 }
10141 },
10142 }),
10143 )
10144 .await;
10145
10146 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
10147 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
10148
10149 fs.set_status_for_repo(
10150 path!("/root/my-repo/.git").as_ref(),
10151 &[(E_TXT, FileStatus::Untracked)],
10152 );
10153
10154 let project = Project::test(
10155 fs.clone(),
10156 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
10157 cx,
10158 )
10159 .await;
10160
10161 project
10162 .update(cx, |project, cx| project.git_scans_complete(cx))
10163 .await;
10164 cx.run_until_parked();
10165
10166 let repository = project.read_with(cx, |project, cx| {
10167 project.repositories(cx).values().next().unwrap().clone()
10168 });
10169
10170 // Ensure that the git status is loaded correctly
10171 repository.read_with(cx, |repository, _cx| {
10172 assert_eq!(
10173 repository.work_directory_abs_path,
10174 Path::new(path!("/root/my-repo")).into()
10175 );
10176
10177 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
10178 assert_eq!(
10179 repository
10180 .status_for_path(&repo_path(E_TXT))
10181 .unwrap()
10182 .status,
10183 FileStatus::Untracked
10184 );
10185 });
10186
10187 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
10188 project
10189 .update(cx, |project, cx| project.git_scans_complete(cx))
10190 .await;
10191 cx.run_until_parked();
10192
10193 repository.read_with(cx, |repository, _cx| {
10194 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
10195 assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
10196 });
10197}
10198
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE: `#[cfg(any())]` is never true, so this test is currently compiled out.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real git repository on disk.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a conflicting edit on a branch, then cherry-pick it onto main.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // The conflicted cherry-pick must leave CHERRY_PICK_HEAD behind.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository entity should report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick is resolved, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
10281
#[gpui::test]
// Verifies that rewriting a `.gitignore` is observed by the worktree scan:
// the previously-ignored file becomes trackable (and shows its index status),
// while the newly-ignored file loses its status and is flagged as ignored.
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index agree with the working copy for the tracked files.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // States flip: `a.xml` is now ignored; `b.txt` shows as Added (staged) and not ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
10349
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
// Verifies that renaming a repository's work directory on disk updates
// `work_directory_abs_path` while preserving the per-file git statuses.
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem (TempTree + RealFs), so parking must be allowed.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit "a", then modify it in the working copy; "b" stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: repo rooted at project1, "a" modified, "b" untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the repository's work directory out from under the worktree.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename; statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
10431
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
// End-to-end exercise of git status tracking against a real repository:
// startup state, working-copy edits, commits, resets/stash, deletions,
// `.gitignore` updates, and directory renames.
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem (TempTree + RealFs), so parking must be allowed.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    // `mut` because `git_stash` below takes `&mut Repository`.
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so they show as untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // Committed files have no status entry at all.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete a file and a directory, and extend the ignore rules.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Reassigned below after the rename, hence `mut`.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // A brand-new nested file shows up as untracked.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    // Rename the parent directory; the untracked status should follow the file
    // to its new path.
    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
10656
#[gpui::test]
#[ignore]
// Verifies that changes inside ignored directories (here, `target/`) do not
// produce spurious repository-status updates, while entry add/remove events
// for directories under an already-loaded ignored dir are still delivered.
// NOTE(review): currently `#[ignore]`d — presumably timing-sensitive like its
// sibling below; confirm before re-enabling.
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem (TempTree + RealFs), so parking must be allowed.
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record repository updates and worktree entry changes for later assertions.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel is test-harness noise, not a real entry.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Explicitly load a file inside the ignored dir so its ancestors get scanned.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![RepositoryEvent::StatusesChanged,],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Simulate build churn inside the ignored directory: create a deps dir,
    // write a temp file, then remove the whole dir again.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
10815
10816// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
10817// to different timings/ordering of events.
10818#[ignore]
10819#[gpui::test]
10820async fn test_odd_events_for_ignored_dirs(
10821 executor: BackgroundExecutor,
10822 cx: &mut gpui::TestAppContext,
10823) {
10824 init_test(cx);
10825 let fs = FakeFs::new(executor);
10826 fs.insert_tree(
10827 path!("/root"),
10828 json!({
10829 ".git": {},
10830 ".gitignore": "**/target/",
10831 "src": {
10832 "main.rs": "fn main() {}",
10833 },
10834 "target": {
10835 "debug": {
10836 "foo.txt": "foo",
10837 "deps": {}
10838 }
10839 }
10840 }),
10841 )
10842 .await;
10843 fs.set_head_and_index_for_repo(
10844 path!("/root/.git").as_ref(),
10845 &[
10846 (".gitignore", "**/target/".into()),
10847 ("src/main.rs", "fn main() {}".into()),
10848 ],
10849 );
10850
10851 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
10852 let repository_updates = Arc::new(Mutex::new(Vec::new()));
10853 let project_events = Arc::new(Mutex::new(Vec::new()));
10854 project.update(cx, |project, cx| {
10855 let repository_updates = repository_updates.clone();
10856 cx.subscribe(project.git_store(), move |_, _, e, _| {
10857 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
10858 repository_updates.lock().push(e.clone());
10859 }
10860 })
10861 .detach();
10862 let project_events = project_events.clone();
10863 cx.subscribe_self(move |_, e, _| {
10864 if let Event::WorktreeUpdatedEntries(_, updates) = e {
10865 project_events.lock().extend(
10866 updates
10867 .iter()
10868 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
10869 .filter(|(path, _)| path != "fs-event-sentinel"),
10870 );
10871 }
10872 })
10873 .detach();
10874 });
10875
10876 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10877 tree.update(cx, |tree, cx| {
10878 tree.load_file(rel_path("target/debug/foo.txt"), cx)
10879 })
10880 .await
10881 .unwrap();
10882 tree.flush_fs_events(cx).await;
10883 project
10884 .update(cx, |project, cx| project.git_scans_complete(cx))
10885 .await;
10886 cx.run_until_parked();
10887 tree.update(cx, |tree, _| {
10888 assert_eq!(
10889 tree.entries(true, 0)
10890 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
10891 .collect::<Vec<_>>(),
10892 vec![
10893 (rel_path(""), false),
10894 (rel_path(".gitignore"), false),
10895 (rel_path("src"), false),
10896 (rel_path("src/main.rs"), false),
10897 (rel_path("target"), true),
10898 (rel_path("target/debug"), true),
10899 (rel_path("target/debug/deps"), true),
10900 (rel_path("target/debug/foo.txt"), true),
10901 ]
10902 );
10903 });
10904
10905 assert_eq!(
10906 repository_updates.lock().drain(..).collect::<Vec<_>>(),
10907 vec![
10908 RepositoryEvent::BranchChanged,
10909 RepositoryEvent::StatusesChanged,
10910 RepositoryEvent::StatusesChanged,
10911 ],
10912 "Initial worktree scan should produce a repo update event"
10913 );
10914 assert_eq!(
10915 project_events.lock().drain(..).collect::<Vec<_>>(),
10916 vec![
10917 ("target".to_string(), PathChange::Loaded),
10918 ("target/debug".to_string(), PathChange::Loaded),
10919 ("target/debug/deps".to_string(), PathChange::Loaded),
10920 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
10921 ],
10922 "All non-ignored entries and all opened firs should be getting a project event",
10923 );
10924
10925 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
10926 // This may happen multiple times during a single flycheck, but once is enough for testing.
10927 fs.emit_fs_event("/root/target/debug/deps", None);
10928 tree.flush_fs_events(cx).await;
10929 project
10930 .update(cx, |project, cx| project.git_scans_complete(cx))
10931 .await;
10932 cx.executor().run_until_parked();
10933
10934 assert_eq!(
10935 repository_updates
10936 .lock()
10937 .iter()
10938 .cloned()
10939 .collect::<Vec<_>>(),
10940 Vec::new(),
10941 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
10942 );
10943 assert_eq!(
10944 project_events.lock().as_slice(),
10945 Vec::new(),
10946 "No further project events should happen, as only ignored dirs received FS events",
10947 );
10948}
10949
#[gpui::test]
// Verifies that adding an invisible (non-visible) worktree for a file inside a
// parent repository does not cause that parent repository to be reported: only
// the repo rooted at the visible worktree (`dep1`) should appear, before and
// after the invisible worktree is created.
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // Nested repos: `dir1` contains its own `.git` and an inner repo `dep1`.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Only `dep1` is opened as a (visible) worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Create an invisible worktree pointing at a file inside the outer repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store().update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The outer `dir1` repository must still not be listed.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
11011
#[gpui::test(iterations = 10)]
// Verifies ignore handling across rescans: files matched by an ancestor
// `.gitignore` (outside the repo) vs. the repo's own `.gitignore`, and that
// newly created files pick up the correct ignored/status state. Runs 10
// iterations to shake out scheduling-order dependence.
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so the `.git` directory itself is listed
    // as an (ignored) entry — asserted at the end of the test.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // The outer `.gitignore` lives above the repo root (`tree/`), so it acts
    // as an ancestor ignore file.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored dir's entries to be scanned so they can be asserted on.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        // Matched only by the ancestor `.gitignore`, which is outside the repo,
        // so it is not considered ignored here.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new tracked file and stage it, plus new files in the
    // ancestor-ignored and repo-ignored locations.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // The staged new file shows as Added.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // `.git` itself is present (scan exclusions were cleared) and ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
11152
#[gpui::test]
// Verifies that linked git worktrees (`.git` file pointing into
// `.git/worktrees/...`) and submodules (`.git` file pointing into
// `.git/modules/...`) are each discovered as separate repositories, and that
// git state changes in them are picked up and reflected in file statuses.
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                // A gitfile, as used by `git worktree add`.
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    // A gitfile, as used for submodule checkouts.
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be discovered: main, linked worktree, submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            // HEAD and index both say "b", while the working copy has "B",
            // making src/b.txt modified in the worktree.
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repo, not the outer one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    // Wait until the repository has processed pending updates.
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
11308
#[gpui::test]
// Verifies that two worktrees rooted at different subdirectories of the same
// git repository resolve to a single repository entry, not one per worktree.
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the same repo as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository, rooted at the shared parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
11355
// When a buffer is saved under a new path, its unstaged and uncommitted diffs
// must be re-based against the *new* path's index and HEAD contents.
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct contents per (file, git layer) so we can tell which base text
    // each diff is comparing against.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Dirty the buffer so it differs from every git layer.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Sanity check: while the buffer is still file_1.rs, the unstaged diff is
    // based on file_1's index contents.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // A freshly-opened uncommitted diff must likewise use file_2's HEAD text.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
11469
11470async fn search(
11471 project: &Entity<Project>,
11472 query: SearchQuery,
11473 cx: &mut gpui::TestAppContext,
11474) -> Result<HashMap<String, Vec<Range<usize>>>> {
11475 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
11476 let mut results = HashMap::default();
11477 while let Ok(search_result) = search_rx.rx.recv().await {
11478 match search_result {
11479 SearchResult::Buffer { buffer, ranges } => {
11480 results.entry(buffer).or_insert(ranges);
11481 }
11482 SearchResult::LimitReached => {}
11483 }
11484 }
11485 Ok(results
11486 .into_iter()
11487 .map(|(buffer, ranges)| {
11488 buffer.update(cx, |buffer, cx| {
11489 let path = buffer
11490 .file()
11491 .unwrap()
11492 .full_path(cx)
11493 .to_string_lossy()
11494 .to_string();
11495 let ranges = ranges
11496 .into_iter()
11497 .map(|range| range.to_offset(buffer))
11498 .collect::<Vec<_>>();
11499 (path, ranges)
11500 })
11501 })
11502 .collect())
11503}
11504
// Reloading a buffer with a different encoding should be undoable: undo
// restores the previous encoding and text, redo re-applies the new one, and
// the buffer stays clean throughout (contents always match disk).
#[gpui::test]
async fn test_undo_encoding_change(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());

    // Create a file with ASCII content "Hi" - this will be detected as UTF-8
    // When reinterpreted as UTF-16LE, the bytes 0x48 0x69 become a single character
    let ascii_bytes: Vec<u8> = vec![0x48, 0x69];
    fs.insert_tree(path!("/dir"), json!({})).await;
    fs.insert_file(path!("/dir/test.txt"), ascii_bytes).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/test.txt"), cx))
        .await
        .unwrap();

    // Initial state: detected as UTF-8, text "Hi", clean.
    let (initial_encoding, initial_text, initial_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(initial_encoding, encoding_rs::UTF_8);
    assert_eq!(initial_text, "Hi");
    assert!(!initial_dirty);

    let reload_receiver = buffer.update(cx, |buffer, cx| {
        buffer.reload_with_encoding(encoding_rs::UTF_16LE, cx)
    });
    cx.executor().run_until_parked();

    // Wait for reload to complete
    let _ = reload_receiver.await;

    // Verify the encoding changed, text is different, and still not dirty (we reloaded from disk)
    let (reloaded_encoding, reloaded_text, reloaded_dirty) = buffer.read_with(cx, |buffer, _| {
        (buffer.encoding(), buffer.text(), buffer.is_dirty())
    });
    assert_eq!(reloaded_encoding, encoding_rs::UTF_16LE);
    // 0x48 0x69 little-endian is U+6948 ("楈").
    assert_eq!(reloaded_text, "楈");
    assert!(!reloaded_dirty);

    // Undo the reload
    buffer.update(cx, |buffer, cx| {
        buffer.undo(cx);
    });

    // Undo restores the original encoding and text without dirtying.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_8);
        assert_eq!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });

    buffer.update(cx, |buffer, cx| {
        buffer.redo(cx);
    });

    // Redo re-applies the UTF-16LE interpretation.
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.encoding(), encoding_rs::UTF_16LE);
        assert_ne!(buffer.text(), "Hi");
        assert!(!buffer.is_dirty());
    });
}
11568
11569pub fn init_test(cx: &mut gpui::TestAppContext) {
11570 zlog::init_test();
11571
11572 cx.update(|cx| {
11573 let settings_store = SettingsStore::test(cx);
11574 cx.set_global(settings_store);
11575 release_channel::init(semver::Version::new(0, 0, 0), cx);
11576 });
11577}
11578
11579fn json_lang() -> Arc<Language> {
11580 Arc::new(Language::new(
11581 LanguageConfig {
11582 name: "JSON".into(),
11583 matcher: LanguageMatcher {
11584 path_suffixes: vec!["json".to_string()],
11585 ..Default::default()
11586 },
11587 ..Default::default()
11588 },
11589 None,
11590 ))
11591}
11592
11593fn js_lang() -> Arc<Language> {
11594 Arc::new(Language::new(
11595 LanguageConfig {
11596 name: "JavaScript".into(),
11597 matcher: LanguageMatcher {
11598 path_suffixes: vec!["js".to_string()],
11599 ..Default::default()
11600 },
11601 ..Default::default()
11602 },
11603 None,
11604 ))
11605}
11606
/// Builds a Python-like test language whose toolchain lister reports a
/// "virtual environment" toolchain for every ancestor of the queried path
/// that contains a `.venv` directory in the given fake file system.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Stub lister backed by the test FakeFs; "Moot" because it never resolves
    // real interpreters.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new_static("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is unsupported in this stub.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for tests.
        fn activation_script(
            &self,
            _: &Toolchain,
            _: ShellKind,
            _: &gpui::App,
        ) -> futures::future::BoxFuture<'static, Vec<String>> {
            Box::pin(async { vec![] })
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
11681
11682fn typescript_lang() -> Arc<Language> {
11683 Arc::new(Language::new(
11684 LanguageConfig {
11685 name: "TypeScript".into(),
11686 matcher: LanguageMatcher {
11687 path_suffixes: vec!["ts".to_string()],
11688 ..Default::default()
11689 },
11690 ..Default::default()
11691 },
11692 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
11693 ))
11694}
11695
11696fn tsx_lang() -> Arc<Language> {
11697 Arc::new(Language::new(
11698 LanguageConfig {
11699 name: "tsx".into(),
11700 matcher: LanguageMatcher {
11701 path_suffixes: vec!["tsx".to_string()],
11702 ..Default::default()
11703 },
11704 ..Default::default()
11705 },
11706 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
11707 ))
11708}
11709
11710fn get_all_tasks(
11711 project: &Entity<Project>,
11712 task_contexts: Arc<TaskContexts>,
11713 cx: &mut App,
11714) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
11715 let new_tasks = project.update(cx, |project, cx| {
11716 project.task_store().update(cx, |task_store, cx| {
11717 task_store.task_inventory().unwrap().update(cx, |this, cx| {
11718 this.used_and_current_resolved_tasks(task_contexts, cx)
11719 })
11720 })
11721 });
11722
11723 cx.background_spawn(async move {
11724 let (mut old, new) = new_tasks.await;
11725 old.extend(new);
11726 old
11727 })
11728}
11729
11730#[track_caller]
11731fn assert_entry_git_state(
11732 tree: &Worktree,
11733 repository: &Repository,
11734 path: &str,
11735 index_status: Option<StatusCode>,
11736 is_ignored: bool,
11737) {
11738 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
11739 let entry = tree
11740 .entry_for_path(&rel_path(path))
11741 .unwrap_or_else(|| panic!("entry {path} not found"));
11742 let status = repository
11743 .status_for_path(&repo_path(path))
11744 .map(|entry| entry.status);
11745 let expected = index_status.map(|index_status| {
11746 TrackedStatus {
11747 index_status,
11748 worktree_status: StatusCode::Unmodified,
11749 }
11750 .into()
11751 });
11752 assert_eq!(
11753 status, expected,
11754 "expected {path} to have git status: {expected:?}"
11755 );
11756 assert_eq!(
11757 entry.is_ignored, is_ignored,
11758 "expected {path} to have is_ignored: {is_ignored}"
11759 );
11760}
11761
11762#[track_caller]
11763fn git_init(path: &Path) -> git2::Repository {
11764 let mut init_opts = RepositoryInitOptions::new();
11765 init_opts.initial_head("main");
11766 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
11767}
11768
11769#[track_caller]
11770fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
11771 let path = path.as_ref();
11772 let mut index = repo.index().expect("Failed to get index");
11773 index.add_path(path).expect("Failed to add file");
11774 index.write().expect("Failed to write index");
11775}
11776
11777#[track_caller]
11778fn git_remove_index(path: &Path, repo: &git2::Repository) {
11779 let mut index = repo.index().expect("Failed to get index");
11780 index.remove_path(path).expect("Failed to add file");
11781 index.write().expect("Failed to write index");
11782}
11783
11784#[track_caller]
11785fn git_commit(msg: &'static str, repo: &git2::Repository) {
11786 use git2::Signature;
11787
11788 let signature = Signature::now("test", "test@zed.dev").unwrap();
11789 let oid = repo.index().unwrap().write_tree().unwrap();
11790 let tree = repo.find_tree(oid).unwrap();
11791 if let Ok(head) = repo.head() {
11792 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
11793
11794 let parent_commit = parent_obj.as_commit().unwrap();
11795
11796 repo.commit(
11797 Some("HEAD"),
11798 &signature,
11799 &signature,
11800 msg,
11801 &tree,
11802 &[parent_commit],
11803 )
11804 .expect("Failed to commit with parent");
11805 } else {
11806 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
11807 .expect("Failed to commit");
11808 }
11809}
11810
// Cherry-picks `commit` onto the current HEAD. Compiled out via
// `#[cfg(any())]` (currently unused) but kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
11816
11817#[track_caller]
11818fn git_stash(repo: &mut git2::Repository) {
11819 use git2::Signature;
11820
11821 let signature = Signature::now("test", "test@zed.dev").unwrap();
11822 repo.stash_save(&signature, "N/A", None)
11823 .expect("Failed to stash");
11824}
11825
11826#[track_caller]
11827fn git_reset(offset: usize, repo: &git2::Repository) {
11828 let head = repo.head().expect("Couldn't get repo head");
11829 let object = head.peel(git2::ObjectType::Commit).unwrap();
11830 let commit = object.as_commit().unwrap();
11831 let new_head = commit
11832 .parents()
11833 .inspect(|parnet| {
11834 parnet.message();
11835 })
11836 .nth(offset)
11837 .expect("Not enough history");
11838 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
11839 .expect("Could not reset");
11840}
11841
// Creates branch `name` at the current HEAD commit. Compiled out via
// `#[cfg(any())]` (currently unused) but kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed: the panic message previously said "Failed to commit" — a
    // copy-paste from `git_commit`; it's branch creation that can fail here.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
11852
// Points HEAD at `name` (a ref name) and checks it out into the working
// tree. Compiled out via `#[cfg(any())]` but kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
11859
// Returns the status of every entry in the repository, keyed by path.
// Compiled out via `#[cfg(any())]` but kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}
11869
// `Project::find_project_path` must map absolute paths to the correct
// worktree (including nonexistent paths *inside* a worktree) and return
// `None` for paths outside every worktree.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two independent worktrees in one project.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Record each worktree's absolute root and id for the assertions below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path need not exist on disk to resolve, as long as it falls
        // under a worktree root.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
11953
// Removing worktrees must drop repositories that are no longer reachable and
// keep `Project::active_repository` pointing at a remaining repo (or `None`
// once all are gone). Note `/root/b/script` is a worktree *inside* repo `b`.
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Three worktrees: repo `a`, a subdirectory of repo `b`, and repo `b`.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Only two distinct repositories despite three worktrees.
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the `script` worktree must not drop repo `b`, which is still
    // covered by the `/root/b` worktree.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing repo `a`'s worktree should move the active repository to `b`.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // With the last worktree gone there is no active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
12066
// While a stage operation is in flight, the affected hunks should be shown
// optimistically as "staging pending" rather than waiting for the git index
// write to land; once it lands they become fully staged.
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            // Still unstarted — keep ticking.
            HasSecondaryHunk => {}
            // Optimistic state reached — stop ticking.
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    // Mid-flight, the hunk must report its staging as pending.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
12211
// Buffers whose paths match any `read_only_files` glob must open read-only;
// everything else stays writable.
#[gpui::test]
async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Configure read_only_files setting
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/generated/**".to_string(),
                    "**/*.gen.rs".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
                "types.gen.rs": "// Generated file",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Open a regular file - should be read-write
    let regular_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    regular_buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Regular file should not be read-only");
    });

    // Open a file matching *.gen.rs pattern - should be read-only
    let gen_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
        })
        .await
        .unwrap();

    gen_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File matching *.gen.rs pattern should be read-only"
        );
    });

    // Open a file in generated directory - should be read-only
    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            buffer.read_only(),
            "File in generated directory should be read-only"
        );
    });
}
12287
// An explicitly-empty `read_only_files` list must mark nothing read-only.
#[gpui::test]
async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Explicitly set read_only_files to empty (default behavior)
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "src": {
                "main.rs": "fn main() {}",
            },
            "generated": {
                "schema.rs": "// Auto-generated schema",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // All files should be read-write when read_only_files is empty
    let main_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    main_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Files should not be read-only when read_only_files is empty"
        );
    });

    // Even a "generated"-looking path is writable without a matching glob.
    let generated_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
        })
        .await
        .unwrap();

    generated_buffer.read_with(cx, |buffer, _| {
        assert!(
            !buffer.read_only(),
            "Generated files should not be read-only when read_only_files is empty"
        );
    });
}
12346
// Lock-file globs in `read_only_files` should make only the lock files
// read-only, leaving their sibling manifests writable.
#[gpui::test]
async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Configure to make lock files read-only
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.read_only_files = Some(vec![
                    "**/*.lock".to_string(),
                    "**/package-lock.json".to_string(),
                ]);
            });
        });
    });

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "Cargo.lock": "# Lock file",
            "Cargo.toml": "[package]",
            "package-lock.json": "{}",
            "package.json": "{}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    // Cargo.lock should be read-only
    let cargo_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.lock"), cx)
        })
        .await
        .unwrap();

    cargo_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "Cargo.lock should be read-only");
    });

    // Cargo.toml should be read-write
    let cargo_toml = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    cargo_toml.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
    });

    // package-lock.json should be read-only
    let package_lock = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package-lock.json"), cx)
        })
        .await
        .unwrap();

    package_lock.read_with(cx, |buffer, _| {
        assert!(buffer.read_only(), "package-lock.json should be read-only");
    });

    // package.json should be read-write
    let package_json = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/root/package.json"), cx)
        })
        .await
        .unwrap();

    package_json.read_with(cx, |buffer, _| {
        assert!(!buffer.read_only(), "package.json should not be read-only");
    });
}
12425
12426mod disable_ai_settings_tests {
12427 use gpui::TestAppContext;
12428 use project::*;
12429 use settings::{Settings, SettingsStore};
12430
12431 #[gpui::test]
12432 async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
12433 cx.update(|cx| {
12434 settings::init(cx);
12435
12436 // Test 1: Default is false (AI enabled)
12437 assert!(
12438 !DisableAiSettings::get_global(cx).disable_ai,
12439 "Default should allow AI"
12440 );
12441 });
12442
12443 let disable_true = serde_json::json!({
12444 "disable_ai": true
12445 })
12446 .to_string();
12447 let disable_false = serde_json::json!({
12448 "disable_ai": false
12449 })
12450 .to_string();
12451
12452 cx.update_global::<SettingsStore, _>(|store, cx| {
12453 store.set_user_settings(&disable_false, cx).unwrap();
12454 store.set_global_settings(&disable_true, cx).unwrap();
12455 });
12456 cx.update(|cx| {
12457 assert!(
12458 DisableAiSettings::get_global(cx).disable_ai,
12459 "Local false cannot override global true"
12460 );
12461 });
12462
12463 cx.update_global::<SettingsStore, _>(|store, cx| {
12464 store.set_global_settings(&disable_false, cx).unwrap();
12465 store.set_user_settings(&disable_true, cx).unwrap();
12466 });
12467
12468 cx.update(|cx| {
12469 assert!(
12470 DisableAiSettings::get_global(cx).disable_ai,
12471 "Local false cannot override global true"
12472 );
12473 });
12474 }
12475
12476 #[gpui::test]
12477 async fn test_disable_ai_project_level_settings(cx: &mut TestAppContext) {
12478 use settings::{LocalSettingsKind, LocalSettingsPath, SettingsLocation, SettingsStore};
12479 use worktree::WorktreeId;
12480
12481 cx.update(|cx| {
12482 settings::init(cx);
12483
12484 // Default should allow AI
12485 assert!(
12486 !DisableAiSettings::get_global(cx).disable_ai,
12487 "Default should allow AI"
12488 );
12489 });
12490
12491 let worktree_id = WorktreeId::from_usize(1);
12492 let rel_path = |path: &str| -> std::sync::Arc<util::rel_path::RelPath> {
12493 std::sync::Arc::from(util::rel_path::RelPath::unix(path).unwrap())
12494 };
12495 let project_path = rel_path("project");
12496 let settings_location = SettingsLocation {
12497 worktree_id,
12498 path: project_path.as_ref(),
12499 };
12500
12501 // Test: Project-level disable_ai=true should disable AI for files in that project
12502 cx.update_global::<SettingsStore, _>(|store, cx| {
12503 store
12504 .set_local_settings(
12505 worktree_id,
12506 LocalSettingsPath::InWorktree(project_path.clone()),
12507 LocalSettingsKind::Settings,
12508 Some(r#"{ "disable_ai": true }"#),
12509 cx,
12510 )
12511 .unwrap();
12512 });
12513
12514 cx.update(|cx| {
12515 let settings = DisableAiSettings::get(Some(settings_location), cx);
12516 assert!(
12517 settings.disable_ai,
12518 "Project-level disable_ai=true should disable AI for files in that project"
12519 );
12520 // Global should now also be true since project-level disable_ai is merged into global
12521 assert!(
12522 DisableAiSettings::get_global(cx).disable_ai,
12523 "Global setting should be affected by project-level disable_ai=true"
12524 );
12525 });
12526
12527 // Test: Setting project-level to false should allow AI for that project
12528 cx.update_global::<SettingsStore, _>(|store, cx| {
12529 store
12530 .set_local_settings(
12531 worktree_id,
12532 LocalSettingsPath::InWorktree(project_path.clone()),
12533 LocalSettingsKind::Settings,
12534 Some(r#"{ "disable_ai": false }"#),
12535 cx,
12536 )
12537 .unwrap();
12538 });
12539
12540 cx.update(|cx| {
12541 let settings = DisableAiSettings::get(Some(settings_location), cx);
12542 assert!(
12543 !settings.disable_ai,
12544 "Project-level disable_ai=false should allow AI"
12545 );
12546 // Global should also be false now
12547 assert!(
12548 !DisableAiSettings::get_global(cx).disable_ai,
12549 "Global setting should be false when project-level is false"
12550 );
12551 });
12552
12553 // Test: User-level true + project-level false = AI disabled (saturation)
12554 let disable_true = serde_json::json!({ "disable_ai": true }).to_string();
12555 cx.update_global::<SettingsStore, _>(|store, cx| {
12556 store.set_user_settings(&disable_true, cx).unwrap();
12557 store
12558 .set_local_settings(
12559 worktree_id,
12560 LocalSettingsPath::InWorktree(project_path.clone()),
12561 LocalSettingsKind::Settings,
12562 Some(r#"{ "disable_ai": false }"#),
12563 cx,
12564 )
12565 .unwrap();
12566 });
12567
12568 cx.update(|cx| {
12569 let settings = DisableAiSettings::get(Some(settings_location), cx);
12570 assert!(
12571 settings.disable_ai,
12572 "Project-level false cannot override user-level true (SaturatingBool)"
12573 );
12574 });
12575 }
12576}