1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
9 DiffHunkStatusKind, assert_hunks,
10};
11use fs::FakeFs;
12use futures::{StreamExt, future};
13use git::{
14 GitHostingProviderRegistry,
15 repository::RepoPath,
16 status::{StatusCode, TrackedStatus},
17};
18use git2::RepositoryInitOptions;
19use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
20use http_client::Url;
21use itertools::Itertools;
22use language::{
23 Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, DiskState, FakeLspAdapter,
24 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
25 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
26 tree_sitter_rust, tree_sitter_typescript,
27};
28use lsp::{
29 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
30 WillRenameFiles, notification::DidRenameFiles,
31};
32use parking_lot::Mutex;
33use paths::{config_dir, tasks_file};
34use postage::stream::Stream as _;
35use pretty_assertions::{assert_eq, assert_matches};
36use rand::{Rng as _, rngs::StdRng};
37use serde_json::json;
38#[cfg(not(windows))]
39use std::os;
40use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
41use task::{ResolvedTask, TaskContext};
42use unindent::Unindent as _;
43use util::{
44 TryFutureExt as _, assert_set_eq, maybe, path,
45 paths::PathMatcher,
46 test::{TempTree, marked_text_offsets},
47 uri,
48};
49use worktree::WorktreeModelHandle as _;
50
51#[gpui::test]
52async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
53 cx.executor().allow_parking();
54
55 let (tx, mut rx) = futures::channel::mpsc::unbounded();
56 let _thread = std::thread::spawn(move || {
57 #[cfg(not(target_os = "windows"))]
58 std::fs::metadata("/tmp").unwrap();
59 #[cfg(target_os = "windows")]
60 std::fs::metadata("C:/Windows").unwrap();
61 std::thread::sleep(Duration::from_millis(1000));
62 tx.unbounded_send(1).unwrap();
63 });
64 rx.next().await.unwrap();
65}
66
67#[gpui::test]
68async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
69 cx.executor().allow_parking();
70
71 let io_task = smol::unblock(move || {
72 println!("sleeping on thread {:?}", std::thread::current().id());
73 std::thread::sleep(Duration::from_millis(10));
74 1
75 });
76
77 let task = cx.foreground_executor().spawn(async move {
78 io_task.await;
79 });
80
81 task.await;
82}
83
84#[cfg(not(windows))]
85#[gpui::test]
86async fn test_symlinks(cx: &mut gpui::TestAppContext) {
87 init_test(cx);
88 cx.executor().allow_parking();
89
90 let dir = TempTree::new(json!({
91 "root": {
92 "apple": "",
93 "banana": {
94 "carrot": {
95 "date": "",
96 "endive": "",
97 }
98 },
99 "fennel": {
100 "grape": "",
101 }
102 }
103 }));
104
105 let root_link_path = dir.path().join("root_link");
106 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
107 os::unix::fs::symlink(
108 dir.path().join("root/fennel"),
109 dir.path().join("root/finnochio"),
110 )
111 .unwrap();
112
113 let project = Project::test(
114 Arc::new(RealFs::new(None, cx.executor())),
115 [root_link_path.as_ref()],
116 cx,
117 )
118 .await;
119
120 project.update(cx, |project, cx| {
121 let tree = project.worktrees(cx).next().unwrap().read(cx);
122 assert_eq!(tree.file_count(), 5);
123 assert_eq!(
124 tree.inode_for_path("fennel/grape"),
125 tree.inode_for_path("finnochio/grape")
126 );
127 });
128}
129
130#[gpui::test]
131async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
132 init_test(cx);
133
134 let dir = TempTree::new(json!({
135 ".editorconfig": r#"
136 root = true
137 [*.rs]
138 indent_style = tab
139 indent_size = 3
140 end_of_line = lf
141 insert_final_newline = true
142 trim_trailing_whitespace = true
143 max_line_length = 120
144 [*.js]
145 tab_width = 10
146 max_line_length = off
147 "#,
148 ".zed": {
149 "settings.json": r#"{
150 "tab_size": 8,
151 "hard_tabs": false,
152 "ensure_final_newline_on_save": false,
153 "remove_trailing_whitespace_on_save": false,
154 "preferred_line_length": 64,
155 "soft_wrap": "editor_width",
156 }"#,
157 },
158 "a.rs": "fn a() {\n A\n}",
159 "b": {
160 ".editorconfig": r#"
161 [*.rs]
162 indent_size = 2
163 max_line_length = off,
164 "#,
165 "b.rs": "fn b() {\n B\n}",
166 },
167 "c.js": "def c\n C\nend",
168 "README.json": "tabs are better\n",
169 }));
170
171 let path = dir.path();
172 let fs = FakeFs::new(cx.executor());
173 fs.insert_tree_from_real_fs(path, path).await;
174 let project = Project::test(fs, [path], cx).await;
175
176 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
177 language_registry.add(js_lang());
178 language_registry.add(json_lang());
179 language_registry.add(rust_lang());
180
181 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
182
183 cx.executor().run_until_parked();
184
185 cx.update(|cx| {
186 let tree = worktree.read(cx);
187 let settings_for = |path: &str| {
188 let file_entry = tree.entry_for_path(path).unwrap().clone();
189 let file = File::for_entry(file_entry, worktree.clone());
190 let file_language = project
191 .read(cx)
192 .languages()
193 .language_for_file_path(file.path.as_ref());
194 let file_language = cx
195 .background_executor()
196 .block(file_language)
197 .expect("Failed to get file language");
198 let file = file as _;
199 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
200 };
201
202 let settings_a = settings_for("a.rs");
203 let settings_b = settings_for("b/b.rs");
204 let settings_c = settings_for("c.js");
205 let settings_readme = settings_for("README.json");
206
207 // .editorconfig overrides .zed/settings
208 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
209 assert_eq!(settings_a.hard_tabs, true);
210 assert_eq!(settings_a.ensure_final_newline_on_save, true);
211 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
212 assert_eq!(settings_a.preferred_line_length, 120);
213
214 // .editorconfig in b/ overrides .editorconfig in root
215 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
216
217 // "indent_size" is not set, so "tab_width" is used
218 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
219
220 // When max_line_length is "off", default to .zed/settings.json
221 assert_eq!(settings_b.preferred_line_length, 64);
222 assert_eq!(settings_c.preferred_line_length, 64);
223
224 // README.md should not be affected by .editorconfig's globe "*.rs"
225 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
226 });
227}
228
229#[gpui::test]
230async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
231 init_test(cx);
232 cx.update(|cx| {
233 GitHostingProviderRegistry::default_global(cx);
234 git_hosting_providers::init(cx);
235 });
236
237 let fs = FakeFs::new(cx.executor());
238 let str_path = path!("/dir");
239 let path = Path::new(str_path);
240
241 fs.insert_tree(
242 path!("/dir"),
243 json!({
244 ".zed": {
245 "settings.json": r#"{
246 "git_hosting_providers": [
247 {
248 "provider": "gitlab",
249 "base_url": "https://google.com",
250 "name": "foo"
251 }
252 ]
253 }"#
254 },
255 }),
256 )
257 .await;
258
259 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
260 let (_worktree, _) =
261 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
262 cx.executor().run_until_parked();
263
264 cx.update(|cx| {
265 let provider = GitHostingProviderRegistry::global(cx);
266 assert!(
267 provider
268 .list_hosting_providers()
269 .into_iter()
270 .any(|provider| provider.name() == "foo")
271 );
272 });
273
274 fs.atomic_write(
275 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
276 "{}".into(),
277 )
278 .await
279 .unwrap();
280
281 cx.run_until_parked();
282
283 cx.update(|cx| {
284 let provider = GitHostingProviderRegistry::global(cx);
285 assert!(
286 !provider
287 .list_hosting_providers()
288 .into_iter()
289 .any(|provider| provider.name() == "foo")
290 );
291 });
292}
293
294#[gpui::test]
295async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
296 init_test(cx);
297 TaskStore::init(None);
298
299 let fs = FakeFs::new(cx.executor());
300 fs.insert_tree(
301 path!("/dir"),
302 json!({
303 ".zed": {
304 "settings.json": r#"{ "tab_size": 8 }"#,
305 "tasks.json": r#"[{
306 "label": "cargo check all",
307 "command": "cargo",
308 "args": ["check", "--all"]
309 },]"#,
310 },
311 "a": {
312 "a.rs": "fn a() {\n A\n}"
313 },
314 "b": {
315 ".zed": {
316 "settings.json": r#"{ "tab_size": 2 }"#,
317 "tasks.json": r#"[{
318 "label": "cargo check",
319 "command": "cargo",
320 "args": ["check"]
321 },]"#,
322 },
323 "b.rs": "fn b() {\n B\n}"
324 }
325 }),
326 )
327 .await;
328
329 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
330 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
331
332 cx.executor().run_until_parked();
333 let worktree_id = cx.update(|cx| {
334 project.update(cx, |project, cx| {
335 project.worktrees(cx).next().unwrap().read(cx).id()
336 })
337 });
338
339 let mut task_contexts = TaskContexts::default();
340 task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
341 let task_contexts = Arc::new(task_contexts);
342
343 let topmost_local_task_source_kind = TaskSourceKind::Worktree {
344 id: worktree_id,
345 directory_in_worktree: PathBuf::from(".zed"),
346 id_base: "local worktree tasks from directory \".zed\"".into(),
347 };
348
349 let all_tasks = cx
350 .update(|cx| {
351 let tree = worktree.read(cx);
352
353 let file_a = File::for_entry(
354 tree.entry_for_path("a/a.rs").unwrap().clone(),
355 worktree.clone(),
356 ) as _;
357 let settings_a = language_settings(None, Some(&file_a), cx);
358 let file_b = File::for_entry(
359 tree.entry_for_path("b/b.rs").unwrap().clone(),
360 worktree.clone(),
361 ) as _;
362 let settings_b = language_settings(None, Some(&file_b), cx);
363
364 assert_eq!(settings_a.tab_size.get(), 8);
365 assert_eq!(settings_b.tab_size.get(), 2);
366
367 get_all_tasks(&project, task_contexts.clone(), cx)
368 })
369 .await
370 .into_iter()
371 .map(|(source_kind, task)| {
372 let resolved = task.resolved;
373 (
374 source_kind,
375 task.resolved_label,
376 resolved.args,
377 resolved.env,
378 )
379 })
380 .collect::<Vec<_>>();
381 assert_eq!(
382 all_tasks,
383 vec![
384 (
385 TaskSourceKind::Worktree {
386 id: worktree_id,
387 directory_in_worktree: PathBuf::from(path!("b/.zed")),
388 id_base: if cfg!(windows) {
389 "local worktree tasks from directory \"b\\\\.zed\"".into()
390 } else {
391 "local worktree tasks from directory \"b/.zed\"".into()
392 },
393 },
394 "cargo check".to_string(),
395 vec!["check".to_string()],
396 HashMap::default(),
397 ),
398 (
399 topmost_local_task_source_kind.clone(),
400 "cargo check all".to_string(),
401 vec!["check".to_string(), "--all".to_string()],
402 HashMap::default(),
403 ),
404 ]
405 );
406
407 let (_, resolved_task) = cx
408 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
409 .await
410 .into_iter()
411 .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
412 .expect("should have one global task");
413 project.update(cx, |project, cx| {
414 let task_inventory = project
415 .task_store
416 .read(cx)
417 .task_inventory()
418 .cloned()
419 .unwrap();
420 task_inventory.update(cx, |inventory, _| {
421 inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
422 inventory
423 .update_file_based_tasks(
424 TaskSettingsLocation::Global(tasks_file()),
425 Some(
426 &json!([{
427 "label": "cargo check unstable",
428 "command": "cargo",
429 "args": [
430 "check",
431 "--all",
432 "--all-targets"
433 ],
434 "env": {
435 "RUSTFLAGS": "-Zunstable-options"
436 }
437 }])
438 .to_string(),
439 ),
440 )
441 .unwrap();
442 });
443 });
444 cx.run_until_parked();
445
446 let all_tasks = cx
447 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
448 .await
449 .into_iter()
450 .map(|(source_kind, task)| {
451 let resolved = task.resolved;
452 (
453 source_kind,
454 task.resolved_label,
455 resolved.args,
456 resolved.env,
457 )
458 })
459 .collect::<Vec<_>>();
460 assert_eq!(
461 all_tasks,
462 vec![
463 (
464 topmost_local_task_source_kind.clone(),
465 "cargo check all".to_string(),
466 vec!["check".to_string(), "--all".to_string()],
467 HashMap::default(),
468 ),
469 (
470 TaskSourceKind::Worktree {
471 id: worktree_id,
472 directory_in_worktree: PathBuf::from(path!("b/.zed")),
473 id_base: if cfg!(windows) {
474 "local worktree tasks from directory \"b\\\\.zed\"".into()
475 } else {
476 "local worktree tasks from directory \"b/.zed\"".into()
477 },
478 },
479 "cargo check".to_string(),
480 vec!["check".to_string()],
481 HashMap::default(),
482 ),
483 (
484 TaskSourceKind::AbsPath {
485 abs_path: paths::tasks_file().clone(),
486 id_base: "global tasks.json".into(),
487 },
488 "cargo check unstable".to_string(),
489 vec![
490 "check".to_string(),
491 "--all".to_string(),
492 "--all-targets".to_string(),
493 ],
494 HashMap::from_iter(Some((
495 "RUSTFLAGS".to_string(),
496 "-Zunstable-options".to_string()
497 ))),
498 ),
499 ]
500 );
501}
502
503#[gpui::test]
504async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
505 init_test(cx);
506 TaskStore::init(None);
507
508 let fs = FakeFs::new(cx.executor());
509 fs.insert_tree(
510 path!("/dir"),
511 json!({
512 ".zed": {
513 "tasks.json": r#"[{
514 "label": "test worktree root",
515 "command": "echo $ZED_WORKTREE_ROOT"
516 }]"#,
517 },
518 "a": {
519 "a.rs": "fn a() {\n A\n}"
520 },
521 }),
522 )
523 .await;
524
525 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
526 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
527
528 cx.executor().run_until_parked();
529 let worktree_id = cx.update(|cx| {
530 project.update(cx, |project, cx| {
531 project.worktrees(cx).next().unwrap().read(cx).id()
532 })
533 });
534
535 let active_non_worktree_item_tasks = cx
536 .update(|cx| {
537 get_all_tasks(
538 &project,
539 Arc::new(TaskContexts {
540 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
541 active_worktree_context: None,
542 other_worktree_contexts: Vec::new(),
543 lsp_task_sources: HashMap::default(),
544 latest_selection: None,
545 }),
546 cx,
547 )
548 })
549 .await;
550 assert!(
551 active_non_worktree_item_tasks.is_empty(),
552 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
553 );
554
555 let active_worktree_tasks = cx
556 .update(|cx| {
557 get_all_tasks(
558 &project,
559 Arc::new(TaskContexts {
560 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
561 active_worktree_context: Some((worktree_id, {
562 let mut worktree_context = TaskContext::default();
563 worktree_context
564 .task_variables
565 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
566 worktree_context
567 })),
568 other_worktree_contexts: Vec::new(),
569 lsp_task_sources: HashMap::default(),
570 latest_selection: None,
571 }),
572 cx,
573 )
574 })
575 .await;
576 assert_eq!(
577 active_worktree_tasks
578 .into_iter()
579 .map(|(source_kind, task)| {
580 let resolved = task.resolved;
581 (source_kind, resolved.command.unwrap())
582 })
583 .collect::<Vec<_>>(),
584 vec![(
585 TaskSourceKind::Worktree {
586 id: worktree_id,
587 directory_in_worktree: PathBuf::from(path!(".zed")),
588 id_base: if cfg!(windows) {
589 "local worktree tasks from directory \".zed\"".into()
590 } else {
591 "local worktree tasks from directory \".zed\"".into()
592 },
593 },
594 "echo /dir".to_string(),
595 )]
596 );
597}
598
599#[gpui::test]
600async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
601 init_test(cx);
602
603 let fs = FakeFs::new(cx.executor());
604 fs.insert_tree(
605 path!("/dir"),
606 json!({
607 "test.rs": "const A: i32 = 1;",
608 "test2.rs": "",
609 "Cargo.toml": "a = 1",
610 "package.json": "{\"a\": 1}",
611 }),
612 )
613 .await;
614
615 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
616 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
617
618 let mut fake_rust_servers = language_registry.register_fake_lsp(
619 "Rust",
620 FakeLspAdapter {
621 name: "the-rust-language-server",
622 capabilities: lsp::ServerCapabilities {
623 completion_provider: Some(lsp::CompletionOptions {
624 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
625 ..Default::default()
626 }),
627 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
628 lsp::TextDocumentSyncOptions {
629 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
630 ..Default::default()
631 },
632 )),
633 ..Default::default()
634 },
635 ..Default::default()
636 },
637 );
638 let mut fake_json_servers = language_registry.register_fake_lsp(
639 "JSON",
640 FakeLspAdapter {
641 name: "the-json-language-server",
642 capabilities: lsp::ServerCapabilities {
643 completion_provider: Some(lsp::CompletionOptions {
644 trigger_characters: Some(vec![":".to_string()]),
645 ..Default::default()
646 }),
647 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
648 lsp::TextDocumentSyncOptions {
649 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
650 ..Default::default()
651 },
652 )),
653 ..Default::default()
654 },
655 ..Default::default()
656 },
657 );
658
659 // Open a buffer without an associated language server.
660 let (toml_buffer, _handle) = project
661 .update(cx, |project, cx| {
662 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
663 })
664 .await
665 .unwrap();
666
667 // Open a buffer with an associated language server before the language for it has been loaded.
668 let (rust_buffer, _handle2) = project
669 .update(cx, |project, cx| {
670 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
671 })
672 .await
673 .unwrap();
674 rust_buffer.update(cx, |buffer, _| {
675 assert_eq!(buffer.language().map(|l| l.name()), None);
676 });
677
678 // Now we add the languages to the project, and ensure they get assigned to all
679 // the relevant open buffers.
680 language_registry.add(json_lang());
681 language_registry.add(rust_lang());
682 cx.executor().run_until_parked();
683 rust_buffer.update(cx, |buffer, _| {
684 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
685 });
686
687 // A server is started up, and it is notified about Rust files.
688 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
689 assert_eq!(
690 fake_rust_server
691 .receive_notification::<lsp::notification::DidOpenTextDocument>()
692 .await
693 .text_document,
694 lsp::TextDocumentItem {
695 uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
696 version: 0,
697 text: "const A: i32 = 1;".to_string(),
698 language_id: "rust".to_string(),
699 }
700 );
701
702 // The buffer is configured based on the language server's capabilities.
703 rust_buffer.update(cx, |buffer, _| {
704 assert_eq!(
705 buffer
706 .completion_triggers()
707 .iter()
708 .cloned()
709 .collect::<Vec<_>>(),
710 &[".".to_string(), "::".to_string()]
711 );
712 });
713 toml_buffer.update(cx, |buffer, _| {
714 assert!(buffer.completion_triggers().is_empty());
715 });
716
717 // Edit a buffer. The changes are reported to the language server.
718 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
719 assert_eq!(
720 fake_rust_server
721 .receive_notification::<lsp::notification::DidChangeTextDocument>()
722 .await
723 .text_document,
724 lsp::VersionedTextDocumentIdentifier::new(
725 lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
726 1
727 )
728 );
729
730 // Open a third buffer with a different associated language server.
731 let (json_buffer, _json_handle) = project
732 .update(cx, |project, cx| {
733 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
734 })
735 .await
736 .unwrap();
737
738 // A json language server is started up and is only notified about the json buffer.
739 let mut fake_json_server = fake_json_servers.next().await.unwrap();
740 assert_eq!(
741 fake_json_server
742 .receive_notification::<lsp::notification::DidOpenTextDocument>()
743 .await
744 .text_document,
745 lsp::TextDocumentItem {
746 uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
747 version: 0,
748 text: "{\"a\": 1}".to_string(),
749 language_id: "json".to_string(),
750 }
751 );
752
753 // This buffer is configured based on the second language server's
754 // capabilities.
755 json_buffer.update(cx, |buffer, _| {
756 assert_eq!(
757 buffer
758 .completion_triggers()
759 .iter()
760 .cloned()
761 .collect::<Vec<_>>(),
762 &[":".to_string()]
763 );
764 });
765
766 // When opening another buffer whose language server is already running,
767 // it is also configured based on the existing language server's capabilities.
768 let (rust_buffer2, _handle4) = project
769 .update(cx, |project, cx| {
770 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
771 })
772 .await
773 .unwrap();
774 rust_buffer2.update(cx, |buffer, _| {
775 assert_eq!(
776 buffer
777 .completion_triggers()
778 .iter()
779 .cloned()
780 .collect::<Vec<_>>(),
781 &[".".to_string(), "::".to_string()]
782 );
783 });
784
785 // Changes are reported only to servers matching the buffer's language.
786 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
787 rust_buffer2.update(cx, |buffer, cx| {
788 buffer.edit([(0..0, "let x = 1;")], None, cx)
789 });
790 assert_eq!(
791 fake_rust_server
792 .receive_notification::<lsp::notification::DidChangeTextDocument>()
793 .await
794 .text_document,
795 lsp::VersionedTextDocumentIdentifier::new(
796 lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
797 1
798 )
799 );
800
801 // Save notifications are reported to all servers.
802 project
803 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
804 .await
805 .unwrap();
806 assert_eq!(
807 fake_rust_server
808 .receive_notification::<lsp::notification::DidSaveTextDocument>()
809 .await
810 .text_document,
811 lsp::TextDocumentIdentifier::new(
812 lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
813 )
814 );
815 assert_eq!(
816 fake_json_server
817 .receive_notification::<lsp::notification::DidSaveTextDocument>()
818 .await
819 .text_document,
820 lsp::TextDocumentIdentifier::new(
821 lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
822 )
823 );
824
825 // Renames are reported only to servers matching the buffer's language.
826 fs.rename(
827 Path::new(path!("/dir/test2.rs")),
828 Path::new(path!("/dir/test3.rs")),
829 Default::default(),
830 )
831 .await
832 .unwrap();
833 assert_eq!(
834 fake_rust_server
835 .receive_notification::<lsp::notification::DidCloseTextDocument>()
836 .await
837 .text_document,
838 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
839 );
840 assert_eq!(
841 fake_rust_server
842 .receive_notification::<lsp::notification::DidOpenTextDocument>()
843 .await
844 .text_document,
845 lsp::TextDocumentItem {
846 uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
847 version: 0,
848 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
849 language_id: "rust".to_string(),
850 },
851 );
852
853 rust_buffer2.update(cx, |buffer, cx| {
854 buffer.update_diagnostics(
855 LanguageServerId(0),
856 DiagnosticSet::from_sorted_entries(
857 vec![DiagnosticEntry {
858 diagnostic: Default::default(),
859 range: Anchor::MIN..Anchor::MAX,
860 }],
861 &buffer.snapshot(),
862 ),
863 cx,
864 );
865 assert_eq!(
866 buffer
867 .snapshot()
868 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
869 .count(),
870 1
871 );
872 });
873
874 // When the rename changes the extension of the file, the buffer gets closed on the old
875 // language server and gets opened on the new one.
876 fs.rename(
877 Path::new(path!("/dir/test3.rs")),
878 Path::new(path!("/dir/test3.json")),
879 Default::default(),
880 )
881 .await
882 .unwrap();
883 assert_eq!(
884 fake_rust_server
885 .receive_notification::<lsp::notification::DidCloseTextDocument>()
886 .await
887 .text_document,
888 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
889 );
890 assert_eq!(
891 fake_json_server
892 .receive_notification::<lsp::notification::DidOpenTextDocument>()
893 .await
894 .text_document,
895 lsp::TextDocumentItem {
896 uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
897 version: 0,
898 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
899 language_id: "json".to_string(),
900 },
901 );
902
903 // We clear the diagnostics, since the language has changed.
904 rust_buffer2.update(cx, |buffer, _| {
905 assert_eq!(
906 buffer
907 .snapshot()
908 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
909 .count(),
910 0
911 );
912 });
913
914 // The renamed file's version resets after changing language server.
915 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
916 assert_eq!(
917 fake_json_server
918 .receive_notification::<lsp::notification::DidChangeTextDocument>()
919 .await
920 .text_document,
921 lsp::VersionedTextDocumentIdentifier::new(
922 lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
923 1
924 )
925 );
926
927 // Restart language servers
928 project.update(cx, |project, cx| {
929 project.restart_language_servers_for_buffers(
930 vec![rust_buffer.clone(), json_buffer.clone()],
931 HashSet::default(),
932 cx,
933 );
934 });
935
936 let mut rust_shutdown_requests = fake_rust_server
937 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
938 let mut json_shutdown_requests = fake_json_server
939 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
940 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
941
942 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
943 let mut fake_json_server = fake_json_servers.next().await.unwrap();
944
945 // Ensure rust document is reopened in new rust language server
946 assert_eq!(
947 fake_rust_server
948 .receive_notification::<lsp::notification::DidOpenTextDocument>()
949 .await
950 .text_document,
951 lsp::TextDocumentItem {
952 uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
953 version: 0,
954 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
955 language_id: "rust".to_string(),
956 }
957 );
958
959 // Ensure json documents are reopened in new json language server
960 assert_set_eq!(
961 [
962 fake_json_server
963 .receive_notification::<lsp::notification::DidOpenTextDocument>()
964 .await
965 .text_document,
966 fake_json_server
967 .receive_notification::<lsp::notification::DidOpenTextDocument>()
968 .await
969 .text_document,
970 ],
971 [
972 lsp::TextDocumentItem {
973 uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
974 version: 0,
975 text: json_buffer.update(cx, |buffer, _| buffer.text()),
976 language_id: "json".to_string(),
977 },
978 lsp::TextDocumentItem {
979 uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
980 version: 0,
981 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
982 language_id: "json".to_string(),
983 }
984 ]
985 );
986
987 // Close notifications are reported only to servers matching the buffer's language.
988 cx.update(|_| drop(_json_handle));
989 let close_message = lsp::DidCloseTextDocumentParams {
990 text_document: lsp::TextDocumentIdentifier::new(
991 lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
992 ),
993 };
994 assert_eq!(
995 fake_json_server
996 .receive_notification::<lsp::notification::DidCloseTextDocument>()
997 .await,
998 close_message,
999 );
1000}
1001
1002#[gpui::test]
1003async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1004 init_test(cx);
1005
1006 let fs = FakeFs::new(cx.executor());
1007 fs.insert_tree(
1008 path!("/the-root"),
1009 json!({
1010 ".gitignore": "target\n",
1011 "Cargo.lock": "",
1012 "src": {
1013 "a.rs": "",
1014 "b.rs": "",
1015 },
1016 "target": {
1017 "x": {
1018 "out": {
1019 "x.rs": ""
1020 }
1021 },
1022 "y": {
1023 "out": {
1024 "y.rs": "",
1025 }
1026 },
1027 "z": {
1028 "out": {
1029 "z.rs": ""
1030 }
1031 }
1032 }
1033 }),
1034 )
1035 .await;
1036 fs.insert_tree(
1037 path!("/the-registry"),
1038 json!({
1039 "dep1": {
1040 "src": {
1041 "dep1.rs": "",
1042 }
1043 },
1044 "dep2": {
1045 "src": {
1046 "dep2.rs": "",
1047 }
1048 },
1049 }),
1050 )
1051 .await;
1052 fs.insert_tree(
1053 path!("/the/stdlib"),
1054 json!({
1055 "LICENSE": "",
1056 "src": {
1057 "string.rs": "",
1058 }
1059 }),
1060 )
1061 .await;
1062
1063 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1064 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1065 (project.languages().clone(), project.lsp_store())
1066 });
1067 language_registry.add(rust_lang());
1068 let mut fake_servers = language_registry.register_fake_lsp(
1069 "Rust",
1070 FakeLspAdapter {
1071 name: "the-language-server",
1072 ..Default::default()
1073 },
1074 );
1075
1076 cx.executor().run_until_parked();
1077
1078 // Start the language server by opening a buffer with a compatible file extension.
1079 project
1080 .update(cx, |project, cx| {
1081 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1082 })
1083 .await
1084 .unwrap();
1085
1086 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1087 project.update(cx, |project, cx| {
1088 let worktree = project.worktrees(cx).next().unwrap();
1089 assert_eq!(
1090 worktree
1091 .read(cx)
1092 .snapshot()
1093 .entries(true, 0)
1094 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1095 .collect::<Vec<_>>(),
1096 &[
1097 (Path::new(""), false),
1098 (Path::new(".gitignore"), false),
1099 (Path::new("Cargo.lock"), false),
1100 (Path::new("src"), false),
1101 (Path::new("src/a.rs"), false),
1102 (Path::new("src/b.rs"), false),
1103 (Path::new("target"), true),
1104 ]
1105 );
1106 });
1107
1108 let prev_read_dir_count = fs.read_dir_call_count();
1109
1110 let fake_server = fake_servers.next().await.unwrap();
1111 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1112 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1113 id
1114 });
1115
1116 // Simulate jumping to a definition in a dependency outside of the worktree.
1117 let _out_of_worktree_buffer = project
1118 .update(cx, |project, cx| {
1119 project.open_local_buffer_via_lsp(
1120 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1121 server_id,
1122 cx,
1123 )
1124 })
1125 .await
1126 .unwrap();
1127
1128 // Keep track of the FS events reported to the language server.
1129 let file_changes = Arc::new(Mutex::new(Vec::new()));
1130 fake_server
1131 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1132 registrations: vec![lsp::Registration {
1133 id: Default::default(),
1134 method: "workspace/didChangeWatchedFiles".to_string(),
1135 register_options: serde_json::to_value(
1136 lsp::DidChangeWatchedFilesRegistrationOptions {
1137 watchers: vec![
1138 lsp::FileSystemWatcher {
1139 glob_pattern: lsp::GlobPattern::String(
1140 path!("/the-root/Cargo.toml").to_string(),
1141 ),
1142 kind: None,
1143 },
1144 lsp::FileSystemWatcher {
1145 glob_pattern: lsp::GlobPattern::String(
1146 path!("/the-root/src/*.{rs,c}").to_string(),
1147 ),
1148 kind: None,
1149 },
1150 lsp::FileSystemWatcher {
1151 glob_pattern: lsp::GlobPattern::String(
1152 path!("/the-root/target/y/**/*.rs").to_string(),
1153 ),
1154 kind: None,
1155 },
1156 lsp::FileSystemWatcher {
1157 glob_pattern: lsp::GlobPattern::String(
1158 path!("/the/stdlib/src/**/*.rs").to_string(),
1159 ),
1160 kind: None,
1161 },
1162 lsp::FileSystemWatcher {
1163 glob_pattern: lsp::GlobPattern::String(
1164 path!("**/Cargo.lock").to_string(),
1165 ),
1166 kind: None,
1167 },
1168 ],
1169 },
1170 )
1171 .ok(),
1172 }],
1173 })
1174 .await
1175 .into_response()
1176 .unwrap();
1177 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1178 let file_changes = file_changes.clone();
1179 move |params, _| {
1180 let mut file_changes = file_changes.lock();
1181 file_changes.extend(params.changes);
1182 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1183 }
1184 });
1185
1186 cx.executor().run_until_parked();
1187 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1188 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1189
1190 let mut new_watched_paths = fs.watched_paths();
1191 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1192 assert_eq!(
1193 &new_watched_paths,
1194 &[
1195 Path::new(path!("/the-root")),
1196 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1197 Path::new(path!("/the/stdlib/src"))
1198 ]
1199 );
1200
1201 // Now the language server has asked us to watch an ignored directory path,
1202 // so we recursively load it.
1203 project.update(cx, |project, cx| {
1204 let worktree = project.visible_worktrees(cx).next().unwrap();
1205 assert_eq!(
1206 worktree
1207 .read(cx)
1208 .snapshot()
1209 .entries(true, 0)
1210 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1211 .collect::<Vec<_>>(),
1212 &[
1213 (Path::new(""), false),
1214 (Path::new(".gitignore"), false),
1215 (Path::new("Cargo.lock"), false),
1216 (Path::new("src"), false),
1217 (Path::new("src/a.rs"), false),
1218 (Path::new("src/b.rs"), false),
1219 (Path::new("target"), true),
1220 (Path::new("target/x"), true),
1221 (Path::new("target/y"), true),
1222 (Path::new("target/y/out"), true),
1223 (Path::new("target/y/out/y.rs"), true),
1224 (Path::new("target/z"), true),
1225 ]
1226 );
1227 });
1228
1229 // Perform some file system mutations, two of which match the watched patterns,
1230 // and one of which does not.
1231 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1232 .await
1233 .unwrap();
1234 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1235 .await
1236 .unwrap();
1237 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1238 .await
1239 .unwrap();
1240 fs.create_file(
1241 path!("/the-root/target/x/out/x2.rs").as_ref(),
1242 Default::default(),
1243 )
1244 .await
1245 .unwrap();
1246 fs.create_file(
1247 path!("/the-root/target/y/out/y2.rs").as_ref(),
1248 Default::default(),
1249 )
1250 .await
1251 .unwrap();
1252 fs.save(
1253 path!("/the-root/Cargo.lock").as_ref(),
1254 &"".into(),
1255 Default::default(),
1256 )
1257 .await
1258 .unwrap();
1259 fs.save(
1260 path!("/the-stdlib/LICENSE").as_ref(),
1261 &"".into(),
1262 Default::default(),
1263 )
1264 .await
1265 .unwrap();
1266 fs.save(
1267 path!("/the/stdlib/src/string.rs").as_ref(),
1268 &"".into(),
1269 Default::default(),
1270 )
1271 .await
1272 .unwrap();
1273
1274 // The language server receives events for the FS mutations that match its watch patterns.
1275 cx.executor().run_until_parked();
1276 assert_eq!(
1277 &*file_changes.lock(),
1278 &[
1279 lsp::FileEvent {
1280 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1281 typ: lsp::FileChangeType::CHANGED,
1282 },
1283 lsp::FileEvent {
1284 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1285 typ: lsp::FileChangeType::DELETED,
1286 },
1287 lsp::FileEvent {
1288 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1289 typ: lsp::FileChangeType::CREATED,
1290 },
1291 lsp::FileEvent {
1292 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1293 typ: lsp::FileChangeType::CREATED,
1294 },
1295 lsp::FileEvent {
1296 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1297 typ: lsp::FileChangeType::CHANGED,
1298 },
1299 ]
1300 );
1301}
1302
1303#[gpui::test]
1304async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1305 init_test(cx);
1306
1307 let fs = FakeFs::new(cx.executor());
1308 fs.insert_tree(
1309 path!("/dir"),
1310 json!({
1311 "a.rs": "let a = 1;",
1312 "b.rs": "let b = 2;"
1313 }),
1314 )
1315 .await;
1316
1317 let project = Project::test(
1318 fs,
1319 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1320 cx,
1321 )
1322 .await;
1323 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1324
1325 let buffer_a = project
1326 .update(cx, |project, cx| {
1327 project.open_local_buffer(path!("/dir/a.rs"), cx)
1328 })
1329 .await
1330 .unwrap();
1331 let buffer_b = project
1332 .update(cx, |project, cx| {
1333 project.open_local_buffer(path!("/dir/b.rs"), cx)
1334 })
1335 .await
1336 .unwrap();
1337
1338 lsp_store.update(cx, |lsp_store, cx| {
1339 lsp_store
1340 .update_diagnostics(
1341 LanguageServerId(0),
1342 lsp::PublishDiagnosticsParams {
1343 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1344 version: None,
1345 diagnostics: vec![lsp::Diagnostic {
1346 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1347 severity: Some(lsp::DiagnosticSeverity::ERROR),
1348 message: "error 1".to_string(),
1349 ..Default::default()
1350 }],
1351 },
1352 None,
1353 DiagnosticSourceKind::Pushed,
1354 &[],
1355 cx,
1356 )
1357 .unwrap();
1358 lsp_store
1359 .update_diagnostics(
1360 LanguageServerId(0),
1361 lsp::PublishDiagnosticsParams {
1362 uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
1363 version: None,
1364 diagnostics: vec![lsp::Diagnostic {
1365 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1366 severity: Some(DiagnosticSeverity::WARNING),
1367 message: "error 2".to_string(),
1368 ..Default::default()
1369 }],
1370 },
1371 None,
1372 DiagnosticSourceKind::Pushed,
1373 &[],
1374 cx,
1375 )
1376 .unwrap();
1377 });
1378
1379 buffer_a.update(cx, |buffer, _| {
1380 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1381 assert_eq!(
1382 chunks
1383 .iter()
1384 .map(|(s, d)| (s.as_str(), *d))
1385 .collect::<Vec<_>>(),
1386 &[
1387 ("let ", None),
1388 ("a", Some(DiagnosticSeverity::ERROR)),
1389 (" = 1;", None),
1390 ]
1391 );
1392 });
1393 buffer_b.update(cx, |buffer, _| {
1394 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1395 assert_eq!(
1396 chunks
1397 .iter()
1398 .map(|(s, d)| (s.as_str(), *d))
1399 .collect::<Vec<_>>(),
1400 &[
1401 ("let ", None),
1402 ("b", Some(DiagnosticSeverity::WARNING)),
1403 (" = 2;", None),
1404 ]
1405 );
1406 });
1407}
1408
1409#[gpui::test]
1410async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1411 init_test(cx);
1412
1413 let fs = FakeFs::new(cx.executor());
1414 fs.insert_tree(
1415 path!("/root"),
1416 json!({
1417 "dir": {
1418 ".git": {
1419 "HEAD": "ref: refs/heads/main",
1420 },
1421 ".gitignore": "b.rs",
1422 "a.rs": "let a = 1;",
1423 "b.rs": "let b = 2;",
1424 },
1425 "other.rs": "let b = c;"
1426 }),
1427 )
1428 .await;
1429
1430 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1431 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1432 let (worktree, _) = project
1433 .update(cx, |project, cx| {
1434 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1435 })
1436 .await
1437 .unwrap();
1438 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1439
1440 let (worktree, _) = project
1441 .update(cx, |project, cx| {
1442 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1443 })
1444 .await
1445 .unwrap();
1446 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1447
1448 let server_id = LanguageServerId(0);
1449 lsp_store.update(cx, |lsp_store, cx| {
1450 lsp_store
1451 .update_diagnostics(
1452 server_id,
1453 lsp::PublishDiagnosticsParams {
1454 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1455 version: None,
1456 diagnostics: vec![lsp::Diagnostic {
1457 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1458 severity: Some(lsp::DiagnosticSeverity::ERROR),
1459 message: "unused variable 'b'".to_string(),
1460 ..Default::default()
1461 }],
1462 },
1463 None,
1464 DiagnosticSourceKind::Pushed,
1465 &[],
1466 cx,
1467 )
1468 .unwrap();
1469 lsp_store
1470 .update_diagnostics(
1471 server_id,
1472 lsp::PublishDiagnosticsParams {
1473 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1474 version: None,
1475 diagnostics: vec![lsp::Diagnostic {
1476 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1477 severity: Some(lsp::DiagnosticSeverity::ERROR),
1478 message: "unknown variable 'c'".to_string(),
1479 ..Default::default()
1480 }],
1481 },
1482 None,
1483 DiagnosticSourceKind::Pushed,
1484 &[],
1485 cx,
1486 )
1487 .unwrap();
1488 });
1489
1490 let main_ignored_buffer = project
1491 .update(cx, |project, cx| {
1492 project.open_buffer((main_worktree_id, "b.rs"), cx)
1493 })
1494 .await
1495 .unwrap();
1496 main_ignored_buffer.update(cx, |buffer, _| {
1497 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1498 assert_eq!(
1499 chunks
1500 .iter()
1501 .map(|(s, d)| (s.as_str(), *d))
1502 .collect::<Vec<_>>(),
1503 &[
1504 ("let ", None),
1505 ("b", Some(DiagnosticSeverity::ERROR)),
1506 (" = 2;", None),
1507 ],
1508 "Gigitnored buffers should still get in-buffer diagnostics",
1509 );
1510 });
1511 let other_buffer = project
1512 .update(cx, |project, cx| {
1513 project.open_buffer((other_worktree_id, ""), cx)
1514 })
1515 .await
1516 .unwrap();
1517 other_buffer.update(cx, |buffer, _| {
1518 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1519 assert_eq!(
1520 chunks
1521 .iter()
1522 .map(|(s, d)| (s.as_str(), *d))
1523 .collect::<Vec<_>>(),
1524 &[
1525 ("let b = ", None),
1526 ("c", Some(DiagnosticSeverity::ERROR)),
1527 (";", None),
1528 ],
1529 "Buffers from hidden projects should still get in-buffer diagnostics"
1530 );
1531 });
1532
1533 project.update(cx, |project, cx| {
1534 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1535 assert_eq!(
1536 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1537 vec![(
1538 ProjectPath {
1539 worktree_id: main_worktree_id,
1540 path: Arc::from(Path::new("b.rs")),
1541 },
1542 server_id,
1543 DiagnosticSummary {
1544 error_count: 1,
1545 warning_count: 0,
1546 }
1547 )]
1548 );
1549 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1550 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1551 });
1552}
1553
1554#[gpui::test]
1555async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
1556 init_test(cx);
1557
1558 let progress_token = "the-progress-token";
1559
1560 let fs = FakeFs::new(cx.executor());
1561 fs.insert_tree(
1562 path!("/dir"),
1563 json!({
1564 "a.rs": "fn a() { A }",
1565 "b.rs": "const y: i32 = 1",
1566 }),
1567 )
1568 .await;
1569
1570 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1571 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1572
1573 language_registry.add(rust_lang());
1574 let mut fake_servers = language_registry.register_fake_lsp(
1575 "Rust",
1576 FakeLspAdapter {
1577 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1578 disk_based_diagnostics_sources: vec!["disk".into()],
1579 ..Default::default()
1580 },
1581 );
1582
1583 let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
1584
1585 // Cause worktree to start the fake language server
1586 let _ = project
1587 .update(cx, |project, cx| {
1588 project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
1589 })
1590 .await
1591 .unwrap();
1592
1593 let mut events = cx.events(&project);
1594
1595 let fake_server = fake_servers.next().await.unwrap();
1596 assert_eq!(
1597 events.next().await.unwrap(),
1598 Event::LanguageServerAdded(
1599 LanguageServerId(0),
1600 fake_server.server.name(),
1601 Some(worktree_id)
1602 ),
1603 );
1604
1605 fake_server
1606 .start_progress(format!("{}/0", progress_token))
1607 .await;
1608 assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
1609 assert_eq!(
1610 events.next().await.unwrap(),
1611 Event::DiskBasedDiagnosticsStarted {
1612 language_server_id: LanguageServerId(0),
1613 }
1614 );
1615
1616 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1617 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1618 version: None,
1619 diagnostics: vec![lsp::Diagnostic {
1620 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1621 severity: Some(lsp::DiagnosticSeverity::ERROR),
1622 message: "undefined variable 'A'".to_string(),
1623 ..Default::default()
1624 }],
1625 });
1626 assert_eq!(
1627 events.next().await.unwrap(),
1628 Event::DiagnosticsUpdated {
1629 language_server_id: LanguageServerId(0),
1630 paths: vec![(worktree_id, Path::new("a.rs")).into()],
1631 }
1632 );
1633
1634 fake_server.end_progress(format!("{}/0", progress_token));
1635 assert_eq!(
1636 events.next().await.unwrap(),
1637 Event::DiskBasedDiagnosticsFinished {
1638 language_server_id: LanguageServerId(0)
1639 }
1640 );
1641
1642 let buffer = project
1643 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
1644 .await
1645 .unwrap();
1646
1647 buffer.update(cx, |buffer, _| {
1648 let snapshot = buffer.snapshot();
1649 let diagnostics = snapshot
1650 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1651 .collect::<Vec<_>>();
1652 assert_eq!(
1653 diagnostics,
1654 &[DiagnosticEntry {
1655 range: Point::new(0, 9)..Point::new(0, 10),
1656 diagnostic: Diagnostic {
1657 severity: lsp::DiagnosticSeverity::ERROR,
1658 message: "undefined variable 'A'".to_string(),
1659 group_id: 0,
1660 is_primary: true,
1661 source_kind: DiagnosticSourceKind::Pushed,
1662 ..Diagnostic::default()
1663 }
1664 }]
1665 )
1666 });
1667
1668 // Ensure publishing empty diagnostics twice only results in one update event.
1669 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1670 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1671 version: None,
1672 diagnostics: Default::default(),
1673 });
1674 assert_eq!(
1675 events.next().await.unwrap(),
1676 Event::DiagnosticsUpdated {
1677 language_server_id: LanguageServerId(0),
1678 paths: vec![(worktree_id, Path::new("a.rs")).into()],
1679 }
1680 );
1681
1682 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1683 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1684 version: None,
1685 diagnostics: Default::default(),
1686 });
1687 cx.executor().run_until_parked();
1688 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1689}
1690
1691#[gpui::test]
1692async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1693 init_test(cx);
1694
1695 let progress_token = "the-progress-token";
1696
1697 let fs = FakeFs::new(cx.executor());
1698 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1699
1700 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1701
1702 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1703 language_registry.add(rust_lang());
1704 let mut fake_servers = language_registry.register_fake_lsp(
1705 "Rust",
1706 FakeLspAdapter {
1707 name: "the-language-server",
1708 disk_based_diagnostics_sources: vec!["disk".into()],
1709 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1710 ..FakeLspAdapter::default()
1711 },
1712 );
1713
1714 let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
1715
1716 let (buffer, _handle) = project
1717 .update(cx, |project, cx| {
1718 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1719 })
1720 .await
1721 .unwrap();
1722 let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
1723 // Simulate diagnostics starting to update.
1724 let fake_server = fake_servers.next().await.unwrap();
1725 fake_server.start_progress(progress_token).await;
1726
1727 // Restart the server before the diagnostics finish updating.
1728 project.update(cx, |project, cx| {
1729 project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
1730 });
1731 let mut events = cx.events(&project);
1732
1733 // Simulate the newly started server sending more diagnostics.
1734 let fake_server = fake_servers.next().await.unwrap();
1735 assert_eq!(
1736 events.next().await.unwrap(),
1737 Event::LanguageServerRemoved(LanguageServerId(0))
1738 );
1739 assert_eq!(
1740 events.next().await.unwrap(),
1741 Event::LanguageServerAdded(
1742 LanguageServerId(1),
1743 fake_server.server.name(),
1744 Some(worktree_id)
1745 )
1746 );
1747 assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
1748 fake_server.start_progress(progress_token).await;
1749 assert_eq!(
1750 events.next().await.unwrap(),
1751 Event::LanguageServerBufferRegistered {
1752 server_id: LanguageServerId(1),
1753 buffer_id,
1754 buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
1755 }
1756 );
1757 assert_eq!(
1758 events.next().await.unwrap(),
1759 Event::DiskBasedDiagnosticsStarted {
1760 language_server_id: LanguageServerId(1)
1761 }
1762 );
1763 project.update(cx, |project, cx| {
1764 assert_eq!(
1765 project
1766 .language_servers_running_disk_based_diagnostics(cx)
1767 .collect::<Vec<_>>(),
1768 [LanguageServerId(1)]
1769 );
1770 });
1771
1772 // All diagnostics are considered done, despite the old server's diagnostic
1773 // task never completing.
1774 fake_server.end_progress(progress_token);
1775 assert_eq!(
1776 events.next().await.unwrap(),
1777 Event::DiskBasedDiagnosticsFinished {
1778 language_server_id: LanguageServerId(1)
1779 }
1780 );
1781 project.update(cx, |project, cx| {
1782 assert_eq!(
1783 project
1784 .language_servers_running_disk_based_diagnostics(cx)
1785 .collect::<Vec<_>>(),
1786 [] as [language::LanguageServerId; 0]
1787 );
1788 });
1789}
1790
1791#[gpui::test]
1792async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1793 init_test(cx);
1794
1795 let fs = FakeFs::new(cx.executor());
1796 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
1797
1798 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1799
1800 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1801 language_registry.add(rust_lang());
1802 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1803
1804 let (buffer, _) = project
1805 .update(cx, |project, cx| {
1806 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1807 })
1808 .await
1809 .unwrap();
1810
1811 // Publish diagnostics
1812 let fake_server = fake_servers.next().await.unwrap();
1813 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1814 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1815 version: None,
1816 diagnostics: vec![lsp::Diagnostic {
1817 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1818 severity: Some(lsp::DiagnosticSeverity::ERROR),
1819 message: "the message".to_string(),
1820 ..Default::default()
1821 }],
1822 });
1823
1824 cx.executor().run_until_parked();
1825 buffer.update(cx, |buffer, _| {
1826 assert_eq!(
1827 buffer
1828 .snapshot()
1829 .diagnostics_in_range::<_, usize>(0..1, false)
1830 .map(|entry| entry.diagnostic.message)
1831 .collect::<Vec<_>>(),
1832 ["the message".to_string()]
1833 );
1834 });
1835 project.update(cx, |project, cx| {
1836 assert_eq!(
1837 project.diagnostic_summary(false, cx),
1838 DiagnosticSummary {
1839 error_count: 1,
1840 warning_count: 0,
1841 }
1842 );
1843 });
1844
1845 project.update(cx, |project, cx| {
1846 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
1847 });
1848
1849 // The diagnostics are cleared.
1850 cx.executor().run_until_parked();
1851 buffer.update(cx, |buffer, _| {
1852 assert_eq!(
1853 buffer
1854 .snapshot()
1855 .diagnostics_in_range::<_, usize>(0..1, false)
1856 .map(|entry| entry.diagnostic.message)
1857 .collect::<Vec<_>>(),
1858 Vec::<String>::new(),
1859 );
1860 });
1861 project.update(cx, |project, cx| {
1862 assert_eq!(
1863 project.diagnostic_summary(false, cx),
1864 DiagnosticSummary {
1865 error_count: 0,
1866 warning_count: 0,
1867 }
1868 );
1869 });
1870}
1871
1872#[gpui::test]
1873async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1874 init_test(cx);
1875
1876 let fs = FakeFs::new(cx.executor());
1877 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1878
1879 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1880 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1881
1882 language_registry.add(rust_lang());
1883 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1884
1885 let (buffer, _handle) = project
1886 .update(cx, |project, cx| {
1887 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1888 })
1889 .await
1890 .unwrap();
1891
1892 // Before restarting the server, report diagnostics with an unknown buffer version.
1893 let fake_server = fake_servers.next().await.unwrap();
1894 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1895 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1896 version: Some(10000),
1897 diagnostics: Vec::new(),
1898 });
1899 cx.executor().run_until_parked();
1900 project.update(cx, |project, cx| {
1901 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
1902 });
1903
1904 let mut fake_server = fake_servers.next().await.unwrap();
1905 let notification = fake_server
1906 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1907 .await
1908 .text_document;
1909 assert_eq!(notification.version, 0);
1910}
1911
1912#[gpui::test]
1913async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1914 init_test(cx);
1915
1916 let progress_token = "the-progress-token";
1917
1918 let fs = FakeFs::new(cx.executor());
1919 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1920
1921 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1922
1923 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1924 language_registry.add(rust_lang());
1925 let mut fake_servers = language_registry.register_fake_lsp(
1926 "Rust",
1927 FakeLspAdapter {
1928 name: "the-language-server",
1929 disk_based_diagnostics_sources: vec!["disk".into()],
1930 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1931 ..Default::default()
1932 },
1933 );
1934
1935 let (buffer, _handle) = project
1936 .update(cx, |project, cx| {
1937 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1938 })
1939 .await
1940 .unwrap();
1941
1942 // Simulate diagnostics starting to update.
1943 let mut fake_server = fake_servers.next().await.unwrap();
1944 fake_server
1945 .start_progress_with(
1946 "another-token",
1947 lsp::WorkDoneProgressBegin {
1948 cancellable: Some(false),
1949 ..Default::default()
1950 },
1951 )
1952 .await;
1953 fake_server
1954 .start_progress_with(
1955 progress_token,
1956 lsp::WorkDoneProgressBegin {
1957 cancellable: Some(true),
1958 ..Default::default()
1959 },
1960 )
1961 .await;
1962 cx.executor().run_until_parked();
1963
1964 project.update(cx, |project, cx| {
1965 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1966 });
1967
1968 let cancel_notification = fake_server
1969 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1970 .await;
1971 assert_eq!(
1972 cancel_notification.token,
1973 NumberOrString::String(progress_token.into())
1974 );
1975}
1976
1977#[gpui::test]
1978async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1979 init_test(cx);
1980
1981 let fs = FakeFs::new(cx.executor());
1982 fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
1983 .await;
1984
1985 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1986 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1987
1988 let mut fake_rust_servers = language_registry.register_fake_lsp(
1989 "Rust",
1990 FakeLspAdapter {
1991 name: "rust-lsp",
1992 ..Default::default()
1993 },
1994 );
1995 let mut fake_js_servers = language_registry.register_fake_lsp(
1996 "JavaScript",
1997 FakeLspAdapter {
1998 name: "js-lsp",
1999 ..Default::default()
2000 },
2001 );
2002 language_registry.add(rust_lang());
2003 language_registry.add(js_lang());
2004
2005 let _rs_buffer = project
2006 .update(cx, |project, cx| {
2007 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2008 })
2009 .await
2010 .unwrap();
2011 let _js_buffer = project
2012 .update(cx, |project, cx| {
2013 project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
2014 })
2015 .await
2016 .unwrap();
2017
2018 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
2019 assert_eq!(
2020 fake_rust_server_1
2021 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2022 .await
2023 .text_document
2024 .uri
2025 .as_str(),
2026 uri!("file:///dir/a.rs")
2027 );
2028
2029 let mut fake_js_server = fake_js_servers.next().await.unwrap();
2030 assert_eq!(
2031 fake_js_server
2032 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2033 .await
2034 .text_document
2035 .uri
2036 .as_str(),
2037 uri!("file:///dir/b.js")
2038 );
2039
2040 // Disable Rust language server, ensuring only that server gets stopped.
2041 cx.update(|cx| {
2042 SettingsStore::update_global(cx, |settings, cx| {
2043 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
2044 settings.languages.0.insert(
2045 "Rust".into(),
2046 LanguageSettingsContent {
2047 enable_language_server: Some(false),
2048 ..Default::default()
2049 },
2050 );
2051 });
2052 })
2053 });
2054 fake_rust_server_1
2055 .receive_notification::<lsp::notification::Exit>()
2056 .await;
2057
2058 // Enable Rust and disable JavaScript language servers, ensuring that the
2059 // former gets started again and that the latter stops.
2060 cx.update(|cx| {
2061 SettingsStore::update_global(cx, |settings, cx| {
2062 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
2063 settings.languages.0.insert(
2064 LanguageName::new("Rust"),
2065 LanguageSettingsContent {
2066 enable_language_server: Some(true),
2067 ..Default::default()
2068 },
2069 );
2070 settings.languages.0.insert(
2071 LanguageName::new("JavaScript"),
2072 LanguageSettingsContent {
2073 enable_language_server: Some(false),
2074 ..Default::default()
2075 },
2076 );
2077 });
2078 })
2079 });
2080 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
2081 assert_eq!(
2082 fake_rust_server_2
2083 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2084 .await
2085 .text_document
2086 .uri
2087 .as_str(),
2088 uri!("file:///dir/a.rs")
2089 );
2090 fake_js_server
2091 .receive_notification::<lsp::notification::Exit>()
2092 .await;
2093}
2094
2095#[gpui::test(iterations = 3)]
2096async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
2097 init_test(cx);
2098
2099 let text = "
2100 fn a() { A }
2101 fn b() { BB }
2102 fn c() { CCC }
2103 "
2104 .unindent();
2105
2106 let fs = FakeFs::new(cx.executor());
2107 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2108
2109 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2110 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2111
2112 language_registry.add(rust_lang());
2113 let mut fake_servers = language_registry.register_fake_lsp(
2114 "Rust",
2115 FakeLspAdapter {
2116 disk_based_diagnostics_sources: vec!["disk".into()],
2117 ..Default::default()
2118 },
2119 );
2120
2121 let buffer = project
2122 .update(cx, |project, cx| {
2123 project.open_local_buffer(path!("/dir/a.rs"), cx)
2124 })
2125 .await
2126 .unwrap();
2127
2128 let _handle = project.update(cx, |project, cx| {
2129 project.register_buffer_with_language_servers(&buffer, cx)
2130 });
2131
2132 let mut fake_server = fake_servers.next().await.unwrap();
2133 let open_notification = fake_server
2134 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2135 .await;
2136
2137 // Edit the buffer, moving the content down
2138 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2139 let change_notification_1 = fake_server
2140 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2141 .await;
2142 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2143
2144 // Report some diagnostics for the initial version of the buffer
2145 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2146 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2147 version: Some(open_notification.text_document.version),
2148 diagnostics: vec![
2149 lsp::Diagnostic {
2150 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2151 severity: Some(DiagnosticSeverity::ERROR),
2152 message: "undefined variable 'A'".to_string(),
2153 source: Some("disk".to_string()),
2154 ..Default::default()
2155 },
2156 lsp::Diagnostic {
2157 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2158 severity: Some(DiagnosticSeverity::ERROR),
2159 message: "undefined variable 'BB'".to_string(),
2160 source: Some("disk".to_string()),
2161 ..Default::default()
2162 },
2163 lsp::Diagnostic {
2164 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2165 severity: Some(DiagnosticSeverity::ERROR),
2166 source: Some("disk".to_string()),
2167 message: "undefined variable 'CCC'".to_string(),
2168 ..Default::default()
2169 },
2170 ],
2171 });
2172
2173 // The diagnostics have moved down since they were created.
2174 cx.executor().run_until_parked();
2175 buffer.update(cx, |buffer, _| {
2176 assert_eq!(
2177 buffer
2178 .snapshot()
2179 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2180 .collect::<Vec<_>>(),
2181 &[
2182 DiagnosticEntry {
2183 range: Point::new(3, 9)..Point::new(3, 11),
2184 diagnostic: Diagnostic {
2185 source: Some("disk".into()),
2186 severity: DiagnosticSeverity::ERROR,
2187 message: "undefined variable 'BB'".to_string(),
2188 is_disk_based: true,
2189 group_id: 1,
2190 is_primary: true,
2191 source_kind: DiagnosticSourceKind::Pushed,
2192 ..Diagnostic::default()
2193 },
2194 },
2195 DiagnosticEntry {
2196 range: Point::new(4, 9)..Point::new(4, 12),
2197 diagnostic: Diagnostic {
2198 source: Some("disk".into()),
2199 severity: DiagnosticSeverity::ERROR,
2200 message: "undefined variable 'CCC'".to_string(),
2201 is_disk_based: true,
2202 group_id: 2,
2203 is_primary: true,
2204 source_kind: DiagnosticSourceKind::Pushed,
2205 ..Diagnostic::default()
2206 }
2207 }
2208 ]
2209 );
2210 assert_eq!(
2211 chunks_with_diagnostics(buffer, 0..buffer.len()),
2212 [
2213 ("\n\nfn a() { ".to_string(), None),
2214 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2215 (" }\nfn b() { ".to_string(), None),
2216 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2217 (" }\nfn c() { ".to_string(), None),
2218 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2219 (" }\n".to_string(), None),
2220 ]
2221 );
2222 assert_eq!(
2223 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2224 [
2225 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2226 (" }\nfn c() { ".to_string(), None),
2227 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2228 ]
2229 );
2230 });
2231
2232 // Ensure overlapping diagnostics are highlighted correctly.
2233 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2234 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2235 version: Some(open_notification.text_document.version),
2236 diagnostics: vec![
2237 lsp::Diagnostic {
2238 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2239 severity: Some(DiagnosticSeverity::ERROR),
2240 message: "undefined variable 'A'".to_string(),
2241 source: Some("disk".to_string()),
2242 ..Default::default()
2243 },
2244 lsp::Diagnostic {
2245 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2246 severity: Some(DiagnosticSeverity::WARNING),
2247 message: "unreachable statement".to_string(),
2248 source: Some("disk".to_string()),
2249 ..Default::default()
2250 },
2251 ],
2252 });
2253
2254 cx.executor().run_until_parked();
2255 buffer.update(cx, |buffer, _| {
2256 assert_eq!(
2257 buffer
2258 .snapshot()
2259 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2260 .collect::<Vec<_>>(),
2261 &[
2262 DiagnosticEntry {
2263 range: Point::new(2, 9)..Point::new(2, 12),
2264 diagnostic: Diagnostic {
2265 source: Some("disk".into()),
2266 severity: DiagnosticSeverity::WARNING,
2267 message: "unreachable statement".to_string(),
2268 is_disk_based: true,
2269 group_id: 4,
2270 is_primary: true,
2271 source_kind: DiagnosticSourceKind::Pushed,
2272 ..Diagnostic::default()
2273 }
2274 },
2275 DiagnosticEntry {
2276 range: Point::new(2, 9)..Point::new(2, 10),
2277 diagnostic: Diagnostic {
2278 source: Some("disk".into()),
2279 severity: DiagnosticSeverity::ERROR,
2280 message: "undefined variable 'A'".to_string(),
2281 is_disk_based: true,
2282 group_id: 3,
2283 is_primary: true,
2284 source_kind: DiagnosticSourceKind::Pushed,
2285 ..Diagnostic::default()
2286 },
2287 }
2288 ]
2289 );
2290 assert_eq!(
2291 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2292 [
2293 ("fn a() { ".to_string(), None),
2294 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2295 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2296 ("\n".to_string(), None),
2297 ]
2298 );
2299 assert_eq!(
2300 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2301 [
2302 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2303 ("\n".to_string(), None),
2304 ]
2305 );
2306 });
2307
2308 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2309 // changes since the last save.
2310 buffer.update(cx, |buffer, cx| {
2311 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2312 buffer.edit(
2313 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2314 None,
2315 cx,
2316 );
2317 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2318 });
2319 let change_notification_2 = fake_server
2320 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2321 .await;
2322 assert!(
2323 change_notification_2.text_document.version > change_notification_1.text_document.version
2324 );
2325
2326 // Handle out-of-order diagnostics
2327 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2328 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2329 version: Some(change_notification_2.text_document.version),
2330 diagnostics: vec![
2331 lsp::Diagnostic {
2332 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2333 severity: Some(DiagnosticSeverity::ERROR),
2334 message: "undefined variable 'BB'".to_string(),
2335 source: Some("disk".to_string()),
2336 ..Default::default()
2337 },
2338 lsp::Diagnostic {
2339 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2340 severity: Some(DiagnosticSeverity::WARNING),
2341 message: "undefined variable 'A'".to_string(),
2342 source: Some("disk".to_string()),
2343 ..Default::default()
2344 },
2345 ],
2346 });
2347
2348 cx.executor().run_until_parked();
2349 buffer.update(cx, |buffer, _| {
2350 assert_eq!(
2351 buffer
2352 .snapshot()
2353 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2354 .collect::<Vec<_>>(),
2355 &[
2356 DiagnosticEntry {
2357 range: Point::new(2, 21)..Point::new(2, 22),
2358 diagnostic: Diagnostic {
2359 source: Some("disk".into()),
2360 severity: DiagnosticSeverity::WARNING,
2361 message: "undefined variable 'A'".to_string(),
2362 is_disk_based: true,
2363 group_id: 6,
2364 is_primary: true,
2365 source_kind: DiagnosticSourceKind::Pushed,
2366 ..Diagnostic::default()
2367 }
2368 },
2369 DiagnosticEntry {
2370 range: Point::new(3, 9)..Point::new(3, 14),
2371 diagnostic: Diagnostic {
2372 source: Some("disk".into()),
2373 severity: DiagnosticSeverity::ERROR,
2374 message: "undefined variable 'BB'".to_string(),
2375 is_disk_based: true,
2376 group_id: 5,
2377 is_primary: true,
2378 source_kind: DiagnosticSourceKind::Pushed,
2379 ..Diagnostic::default()
2380 },
2381 }
2382 ]
2383 );
2384 });
2385}
2386
2387#[gpui::test]
2388async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2389 init_test(cx);
2390
2391 let text = concat!(
2392 "let one = ;\n", //
2393 "let two = \n",
2394 "let three = 3;\n",
2395 );
2396
2397 let fs = FakeFs::new(cx.executor());
2398 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2399
2400 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2401 let buffer = project
2402 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2403 .await
2404 .unwrap();
2405
2406 project.update(cx, |project, cx| {
2407 project.lsp_store.update(cx, |lsp_store, cx| {
2408 lsp_store
2409 .update_diagnostic_entries(
2410 LanguageServerId(0),
2411 PathBuf::from("/dir/a.rs"),
2412 None,
2413 None,
2414 vec![
2415 DiagnosticEntry {
2416 range: Unclipped(PointUtf16::new(0, 10))
2417 ..Unclipped(PointUtf16::new(0, 10)),
2418 diagnostic: Diagnostic {
2419 severity: DiagnosticSeverity::ERROR,
2420 message: "syntax error 1".to_string(),
2421 source_kind: DiagnosticSourceKind::Pushed,
2422 ..Diagnostic::default()
2423 },
2424 },
2425 DiagnosticEntry {
2426 range: Unclipped(PointUtf16::new(1, 10))
2427 ..Unclipped(PointUtf16::new(1, 10)),
2428 diagnostic: Diagnostic {
2429 severity: DiagnosticSeverity::ERROR,
2430 message: "syntax error 2".to_string(),
2431 source_kind: DiagnosticSourceKind::Pushed,
2432 ..Diagnostic::default()
2433 },
2434 },
2435 ],
2436 cx,
2437 )
2438 .unwrap();
2439 })
2440 });
2441
2442 // An empty range is extended forward to include the following character.
2443 // At the end of a line, an empty range is extended backward to include
2444 // the preceding character.
2445 buffer.update(cx, |buffer, _| {
2446 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2447 assert_eq!(
2448 chunks
2449 .iter()
2450 .map(|(s, d)| (s.as_str(), *d))
2451 .collect::<Vec<_>>(),
2452 &[
2453 ("let one = ", None),
2454 (";", Some(DiagnosticSeverity::ERROR)),
2455 ("\nlet two =", None),
2456 (" ", Some(DiagnosticSeverity::ERROR)),
2457 ("\nlet three = 3;\n", None)
2458 ]
2459 );
2460 });
2461}
2462
2463#[gpui::test]
2464async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2465 init_test(cx);
2466
2467 let fs = FakeFs::new(cx.executor());
2468 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2469 .await;
2470
2471 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2472 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2473
2474 lsp_store.update(cx, |lsp_store, cx| {
2475 lsp_store
2476 .update_diagnostic_entries(
2477 LanguageServerId(0),
2478 Path::new("/dir/a.rs").to_owned(),
2479 None,
2480 None,
2481 vec![DiagnosticEntry {
2482 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2483 diagnostic: Diagnostic {
2484 severity: DiagnosticSeverity::ERROR,
2485 is_primary: true,
2486 message: "syntax error a1".to_string(),
2487 source_kind: DiagnosticSourceKind::Pushed,
2488 ..Diagnostic::default()
2489 },
2490 }],
2491 cx,
2492 )
2493 .unwrap();
2494 lsp_store
2495 .update_diagnostic_entries(
2496 LanguageServerId(1),
2497 Path::new("/dir/a.rs").to_owned(),
2498 None,
2499 None,
2500 vec![DiagnosticEntry {
2501 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2502 diagnostic: Diagnostic {
2503 severity: DiagnosticSeverity::ERROR,
2504 is_primary: true,
2505 message: "syntax error b1".to_string(),
2506 source_kind: DiagnosticSourceKind::Pushed,
2507 ..Diagnostic::default()
2508 },
2509 }],
2510 cx,
2511 )
2512 .unwrap();
2513
2514 assert_eq!(
2515 lsp_store.diagnostic_summary(false, cx),
2516 DiagnosticSummary {
2517 error_count: 2,
2518 warning_count: 0,
2519 }
2520 );
2521 });
2522}
2523
2524#[gpui::test]
2525async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
2526 init_test(cx);
2527
2528 let text = "
2529 fn a() {
2530 f1();
2531 }
2532 fn b() {
2533 f2();
2534 }
2535 fn c() {
2536 f3();
2537 }
2538 "
2539 .unindent();
2540
2541 let fs = FakeFs::new(cx.executor());
2542 fs.insert_tree(
2543 path!("/dir"),
2544 json!({
2545 "a.rs": text.clone(),
2546 }),
2547 )
2548 .await;
2549
2550 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2551 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2552
2553 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2554 language_registry.add(rust_lang());
2555 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2556
2557 let (buffer, _handle) = project
2558 .update(cx, |project, cx| {
2559 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2560 })
2561 .await
2562 .unwrap();
2563
2564 let mut fake_server = fake_servers.next().await.unwrap();
2565 let lsp_document_version = fake_server
2566 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2567 .await
2568 .text_document
2569 .version;
2570
2571 // Simulate editing the buffer after the language server computes some edits.
2572 buffer.update(cx, |buffer, cx| {
2573 buffer.edit(
2574 [(
2575 Point::new(0, 0)..Point::new(0, 0),
2576 "// above first function\n",
2577 )],
2578 None,
2579 cx,
2580 );
2581 buffer.edit(
2582 [(
2583 Point::new(2, 0)..Point::new(2, 0),
2584 " // inside first function\n",
2585 )],
2586 None,
2587 cx,
2588 );
2589 buffer.edit(
2590 [(
2591 Point::new(6, 4)..Point::new(6, 4),
2592 "// inside second function ",
2593 )],
2594 None,
2595 cx,
2596 );
2597
2598 assert_eq!(
2599 buffer.text(),
2600 "
2601 // above first function
2602 fn a() {
2603 // inside first function
2604 f1();
2605 }
2606 fn b() {
2607 // inside second function f2();
2608 }
2609 fn c() {
2610 f3();
2611 }
2612 "
2613 .unindent()
2614 );
2615 });
2616
2617 let edits = lsp_store
2618 .update(cx, |lsp_store, cx| {
2619 lsp_store.as_local_mut().unwrap().edits_from_lsp(
2620 &buffer,
2621 vec![
2622 // replace body of first function
2623 lsp::TextEdit {
2624 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
2625 new_text: "
2626 fn a() {
2627 f10();
2628 }
2629 "
2630 .unindent(),
2631 },
2632 // edit inside second function
2633 lsp::TextEdit {
2634 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
2635 new_text: "00".into(),
2636 },
2637 // edit inside third function via two distinct edits
2638 lsp::TextEdit {
2639 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
2640 new_text: "4000".into(),
2641 },
2642 lsp::TextEdit {
2643 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
2644 new_text: "".into(),
2645 },
2646 ],
2647 LanguageServerId(0),
2648 Some(lsp_document_version),
2649 cx,
2650 )
2651 })
2652 .await
2653 .unwrap();
2654
2655 buffer.update(cx, |buffer, cx| {
2656 for (range, new_text) in edits {
2657 buffer.edit([(range, new_text)], None, cx);
2658 }
2659 assert_eq!(
2660 buffer.text(),
2661 "
2662 // above first function
2663 fn a() {
2664 // inside first function
2665 f10();
2666 }
2667 fn b() {
2668 // inside second function f200();
2669 }
2670 fn c() {
2671 f4000();
2672 }
2673 "
2674 .unindent()
2675 );
2676 });
2677}
2678
2679#[gpui::test]
2680async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
2681 init_test(cx);
2682
2683 let text = "
2684 use a::b;
2685 use a::c;
2686
2687 fn f() {
2688 b();
2689 c();
2690 }
2691 "
2692 .unindent();
2693
2694 let fs = FakeFs::new(cx.executor());
2695 fs.insert_tree(
2696 path!("/dir"),
2697 json!({
2698 "a.rs": text.clone(),
2699 }),
2700 )
2701 .await;
2702
2703 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2704 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2705 let buffer = project
2706 .update(cx, |project, cx| {
2707 project.open_local_buffer(path!("/dir/a.rs"), cx)
2708 })
2709 .await
2710 .unwrap();
2711
2712 // Simulate the language server sending us a small edit in the form of a very large diff.
2713 // Rust-analyzer does this when performing a merge-imports code action.
2714 let edits = lsp_store
2715 .update(cx, |lsp_store, cx| {
2716 lsp_store.as_local_mut().unwrap().edits_from_lsp(
2717 &buffer,
2718 [
2719 // Replace the first use statement without editing the semicolon.
2720 lsp::TextEdit {
2721 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
2722 new_text: "a::{b, c}".into(),
2723 },
2724 // Reinsert the remainder of the file between the semicolon and the final
2725 // newline of the file.
2726 lsp::TextEdit {
2727 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2728 new_text: "\n\n".into(),
2729 },
2730 lsp::TextEdit {
2731 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2732 new_text: "
2733 fn f() {
2734 b();
2735 c();
2736 }"
2737 .unindent(),
2738 },
2739 // Delete everything after the first newline of the file.
2740 lsp::TextEdit {
2741 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
2742 new_text: "".into(),
2743 },
2744 ],
2745 LanguageServerId(0),
2746 None,
2747 cx,
2748 )
2749 })
2750 .await
2751 .unwrap();
2752
2753 buffer.update(cx, |buffer, cx| {
2754 let edits = edits
2755 .into_iter()
2756 .map(|(range, text)| {
2757 (
2758 range.start.to_point(buffer)..range.end.to_point(buffer),
2759 text,
2760 )
2761 })
2762 .collect::<Vec<_>>();
2763
2764 assert_eq!(
2765 edits,
2766 [
2767 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2768 (Point::new(1, 0)..Point::new(2, 0), "".into())
2769 ]
2770 );
2771
2772 for (range, new_text) in edits {
2773 buffer.edit([(range, new_text)], None, cx);
2774 }
2775 assert_eq!(
2776 buffer.text(),
2777 "
2778 use a::{b, c};
2779
2780 fn f() {
2781 b();
2782 c();
2783 }
2784 "
2785 .unindent()
2786 );
2787 });
2788}
2789
2790#[gpui::test]
2791async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
2792 cx: &mut gpui::TestAppContext,
2793) {
2794 init_test(cx);
2795
2796 let text = "Path()";
2797
2798 let fs = FakeFs::new(cx.executor());
2799 fs.insert_tree(
2800 path!("/dir"),
2801 json!({
2802 "a.rs": text
2803 }),
2804 )
2805 .await;
2806
2807 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2808 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2809 let buffer = project
2810 .update(cx, |project, cx| {
2811 project.open_local_buffer(path!("/dir/a.rs"), cx)
2812 })
2813 .await
2814 .unwrap();
2815
2816 // Simulate the language server sending us a pair of edits at the same location,
2817 // with an insertion following a replacement (which violates the LSP spec).
2818 let edits = lsp_store
2819 .update(cx, |lsp_store, cx| {
2820 lsp_store.as_local_mut().unwrap().edits_from_lsp(
2821 &buffer,
2822 [
2823 lsp::TextEdit {
2824 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
2825 new_text: "Path".into(),
2826 },
2827 lsp::TextEdit {
2828 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2829 new_text: "from path import Path\n\n\n".into(),
2830 },
2831 ],
2832 LanguageServerId(0),
2833 None,
2834 cx,
2835 )
2836 })
2837 .await
2838 .unwrap();
2839
2840 buffer.update(cx, |buffer, cx| {
2841 buffer.edit(edits, None, cx);
2842 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
2843 });
2844}
2845
2846#[gpui::test]
2847async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
2848 init_test(cx);
2849
2850 let text = "
2851 use a::b;
2852 use a::c;
2853
2854 fn f() {
2855 b();
2856 c();
2857 }
2858 "
2859 .unindent();
2860
2861 let fs = FakeFs::new(cx.executor());
2862 fs.insert_tree(
2863 path!("/dir"),
2864 json!({
2865 "a.rs": text.clone(),
2866 }),
2867 )
2868 .await;
2869
2870 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2871 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2872 let buffer = project
2873 .update(cx, |project, cx| {
2874 project.open_local_buffer(path!("/dir/a.rs"), cx)
2875 })
2876 .await
2877 .unwrap();
2878
2879 // Simulate the language server sending us edits in a non-ordered fashion,
2880 // with ranges sometimes being inverted or pointing to invalid locations.
2881 let edits = lsp_store
2882 .update(cx, |lsp_store, cx| {
2883 lsp_store.as_local_mut().unwrap().edits_from_lsp(
2884 &buffer,
2885 [
2886 lsp::TextEdit {
2887 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2888 new_text: "\n\n".into(),
2889 },
2890 lsp::TextEdit {
2891 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2892 new_text: "a::{b, c}".into(),
2893 },
2894 lsp::TextEdit {
2895 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2896 new_text: "".into(),
2897 },
2898 lsp::TextEdit {
2899 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2900 new_text: "
2901 fn f() {
2902 b();
2903 c();
2904 }"
2905 .unindent(),
2906 },
2907 ],
2908 LanguageServerId(0),
2909 None,
2910 cx,
2911 )
2912 })
2913 .await
2914 .unwrap();
2915
2916 buffer.update(cx, |buffer, cx| {
2917 let edits = edits
2918 .into_iter()
2919 .map(|(range, text)| {
2920 (
2921 range.start.to_point(buffer)..range.end.to_point(buffer),
2922 text,
2923 )
2924 })
2925 .collect::<Vec<_>>();
2926
2927 assert_eq!(
2928 edits,
2929 [
2930 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2931 (Point::new(1, 0)..Point::new(2, 0), "".into())
2932 ]
2933 );
2934
2935 for (range, new_text) in edits {
2936 buffer.edit([(range, new_text)], None, cx);
2937 }
2938 assert_eq!(
2939 buffer.text(),
2940 "
2941 use a::{b, c};
2942
2943 fn f() {
2944 b();
2945 c();
2946 }
2947 "
2948 .unindent()
2949 );
2950 });
2951}
2952
2953fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2954 buffer: &Buffer,
2955 range: Range<T>,
2956) -> Vec<(String, Option<DiagnosticSeverity>)> {
2957 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2958 for chunk in buffer.snapshot().chunks(range, true) {
2959 if chunks
2960 .last()
2961 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
2962 {
2963 chunks.last_mut().unwrap().0.push_str(chunk.text);
2964 } else {
2965 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2966 }
2967 }
2968 chunks
2969}
2970
2971#[gpui::test(iterations = 10)]
2972async fn test_definition(cx: &mut gpui::TestAppContext) {
2973 init_test(cx);
2974
2975 let fs = FakeFs::new(cx.executor());
2976 fs.insert_tree(
2977 path!("/dir"),
2978 json!({
2979 "a.rs": "const fn a() { A }",
2980 "b.rs": "const y: i32 = crate::a()",
2981 }),
2982 )
2983 .await;
2984
2985 let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;
2986
2987 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2988 language_registry.add(rust_lang());
2989 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2990
2991 let (buffer, _handle) = project
2992 .update(cx, |project, cx| {
2993 project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
2994 })
2995 .await
2996 .unwrap();
2997
2998 let fake_server = fake_servers.next().await.unwrap();
2999 fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
3000 let params = params.text_document_position_params;
3001 assert_eq!(
3002 params.text_document.uri.to_file_path().unwrap(),
3003 Path::new(path!("/dir/b.rs")),
3004 );
3005 assert_eq!(params.position, lsp::Position::new(0, 22));
3006
3007 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
3008 lsp::Location::new(
3009 lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
3010 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3011 ),
3012 )))
3013 });
3014 let mut definitions = project
3015 .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
3016 .await
3017 .unwrap()
3018 .unwrap();
3019
3020 // Assert no new language server started
3021 cx.executor().run_until_parked();
3022 assert!(fake_servers.try_next().is_err());
3023
3024 assert_eq!(definitions.len(), 1);
3025 let definition = definitions.pop().unwrap();
3026 cx.update(|cx| {
3027 let target_buffer = definition.target.buffer.read(cx);
3028 assert_eq!(
3029 target_buffer
3030 .file()
3031 .unwrap()
3032 .as_local()
3033 .unwrap()
3034 .abs_path(cx),
3035 Path::new(path!("/dir/a.rs")),
3036 );
3037 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
3038 assert_eq!(
3039 list_worktrees(&project, cx),
3040 [
3041 (path!("/dir/a.rs").as_ref(), false),
3042 (path!("/dir/b.rs").as_ref(), true)
3043 ],
3044 );
3045
3046 drop(definition);
3047 });
3048 cx.update(|cx| {
3049 assert_eq!(
3050 list_worktrees(&project, cx),
3051 [(path!("/dir/b.rs").as_ref(), true)]
3052 );
3053 });
3054
3055 fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
3056 project
3057 .read(cx)
3058 .worktrees(cx)
3059 .map(|worktree| {
3060 let worktree = worktree.read(cx);
3061 (
3062 worktree.as_local().unwrap().abs_path().as_ref(),
3063 worktree.is_visible(),
3064 )
3065 })
3066 .collect::<Vec<_>>()
3067 }
3068}
3069
3070#[gpui::test]
3071async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
3072 init_test(cx);
3073
3074 let fs = FakeFs::new(cx.executor());
3075 fs.insert_tree(
3076 path!("/dir"),
3077 json!({
3078 "a.ts": "",
3079 }),
3080 )
3081 .await;
3082
3083 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3084
3085 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3086 language_registry.add(typescript_lang());
3087 let mut fake_language_servers = language_registry.register_fake_lsp(
3088 "TypeScript",
3089 FakeLspAdapter {
3090 capabilities: lsp::ServerCapabilities {
3091 completion_provider: Some(lsp::CompletionOptions {
3092 trigger_characters: Some(vec![".".to_string()]),
3093 ..Default::default()
3094 }),
3095 ..Default::default()
3096 },
3097 ..Default::default()
3098 },
3099 );
3100
3101 let (buffer, _handle) = project
3102 .update(cx, |p, cx| {
3103 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3104 })
3105 .await
3106 .unwrap();
3107
3108 let fake_server = fake_language_servers.next().await.unwrap();
3109
3110 // When text_edit exists, it takes precedence over insert_text and label
3111 let text = "let a = obj.fqn";
3112 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3113 let completions = project.update(cx, |project, cx| {
3114 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3115 });
3116
3117 fake_server
3118 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
3119 Ok(Some(lsp::CompletionResponse::Array(vec![
3120 lsp::CompletionItem {
3121 label: "labelText".into(),
3122 insert_text: Some("insertText".into()),
3123 text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
3124 range: lsp::Range::new(
3125 lsp::Position::new(0, text.len() as u32 - 3),
3126 lsp::Position::new(0, text.len() as u32),
3127 ),
3128 new_text: "textEditText".into(),
3129 })),
3130 ..Default::default()
3131 },
3132 ])))
3133 })
3134 .next()
3135 .await;
3136
3137 let completions = completions
3138 .await
3139 .unwrap()
3140 .into_iter()
3141 .flat_map(|response| response.completions)
3142 .collect::<Vec<_>>();
3143 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
3144
3145 assert_eq!(completions.len(), 1);
3146 assert_eq!(completions[0].new_text, "textEditText");
3147 assert_eq!(
3148 completions[0].replace_range.to_offset(&snapshot),
3149 text.len() - 3..text.len()
3150 );
3151}
3152
3153#[gpui::test]
3154async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
3155 init_test(cx);
3156
3157 let fs = FakeFs::new(cx.executor());
3158 fs.insert_tree(
3159 path!("/dir"),
3160 json!({
3161 "a.ts": "",
3162 }),
3163 )
3164 .await;
3165
3166 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3167
3168 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3169 language_registry.add(typescript_lang());
3170 let mut fake_language_servers = language_registry.register_fake_lsp(
3171 "TypeScript",
3172 FakeLspAdapter {
3173 capabilities: lsp::ServerCapabilities {
3174 completion_provider: Some(lsp::CompletionOptions {
3175 trigger_characters: Some(vec![".".to_string()]),
3176 ..Default::default()
3177 }),
3178 ..Default::default()
3179 },
3180 ..Default::default()
3181 },
3182 );
3183
3184 let (buffer, _handle) = project
3185 .update(cx, |p, cx| {
3186 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3187 })
3188 .await
3189 .unwrap();
3190
3191 let fake_server = fake_language_servers.next().await.unwrap();
3192 let text = "let a = obj.fqn";
3193
3194 // Test 1: When text_edit is None but insert_text exists with default edit_range
3195 {
3196 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3197 let completions = project.update(cx, |project, cx| {
3198 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3199 });
3200
3201 fake_server
3202 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
3203 Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
3204 is_incomplete: false,
3205 item_defaults: Some(lsp::CompletionListItemDefaults {
3206 edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
3207 lsp::Range::new(
3208 lsp::Position::new(0, text.len() as u32 - 3),
3209 lsp::Position::new(0, text.len() as u32),
3210 ),
3211 )),
3212 ..Default::default()
3213 }),
3214 items: vec![lsp::CompletionItem {
3215 label: "labelText".into(),
3216 insert_text: Some("insertText".into()),
3217 text_edit: None,
3218 ..Default::default()
3219 }],
3220 })))
3221 })
3222 .next()
3223 .await;
3224
3225 let completions = completions
3226 .await
3227 .unwrap()
3228 .into_iter()
3229 .flat_map(|response| response.completions)
3230 .collect::<Vec<_>>();
3231 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
3232
3233 assert_eq!(completions.len(), 1);
3234 assert_eq!(completions[0].new_text, "insertText");
3235 assert_eq!(
3236 completions[0].replace_range.to_offset(&snapshot),
3237 text.len() - 3..text.len()
3238 );
3239 }
3240
3241 // Test 2: When both text_edit and insert_text are None with default edit_range
3242 {
3243 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3244 let completions = project.update(cx, |project, cx| {
3245 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3246 });
3247
3248 fake_server
3249 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
3250 Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
3251 is_incomplete: false,
3252 item_defaults: Some(lsp::CompletionListItemDefaults {
3253 edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
3254 lsp::Range::new(
3255 lsp::Position::new(0, text.len() as u32 - 3),
3256 lsp::Position::new(0, text.len() as u32),
3257 ),
3258 )),
3259 ..Default::default()
3260 }),
3261 items: vec![lsp::CompletionItem {
3262 label: "labelText".into(),
3263 insert_text: None,
3264 text_edit: None,
3265 ..Default::default()
3266 }],
3267 })))
3268 })
3269 .next()
3270 .await;
3271
3272 let completions = completions
3273 .await
3274 .unwrap()
3275 .into_iter()
3276 .flat_map(|response| response.completions)
3277 .collect::<Vec<_>>();
3278 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
3279
3280 assert_eq!(completions.len(), 1);
3281 assert_eq!(completions[0].new_text, "labelText");
3282 assert_eq!(
3283 completions[0].replace_range.to_offset(&snapshot),
3284 text.len() - 3..text.len()
3285 );
3286 }
3287}
3288
3289#[gpui::test]
3290async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
3291 init_test(cx);
3292
3293 let fs = FakeFs::new(cx.executor());
3294 fs.insert_tree(
3295 path!("/dir"),
3296 json!({
3297 "a.ts": "",
3298 }),
3299 )
3300 .await;
3301
3302 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3303
3304 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3305 language_registry.add(typescript_lang());
3306 let mut fake_language_servers = language_registry.register_fake_lsp(
3307 "TypeScript",
3308 FakeLspAdapter {
3309 capabilities: lsp::ServerCapabilities {
3310 completion_provider: Some(lsp::CompletionOptions {
3311 trigger_characters: Some(vec![":".to_string()]),
3312 ..Default::default()
3313 }),
3314 ..Default::default()
3315 },
3316 ..Default::default()
3317 },
3318 );
3319
3320 let (buffer, _handle) = project
3321 .update(cx, |p, cx| {
3322 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3323 })
3324 .await
3325 .unwrap();
3326
3327 let fake_server = fake_language_servers.next().await.unwrap();
3328
3329 // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
3330 let text = "let a = b.fqn";
3331 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3332 let completions = project.update(cx, |project, cx| {
3333 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3334 });
3335
3336 fake_server
3337 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3338 Ok(Some(lsp::CompletionResponse::Array(vec![
3339 lsp::CompletionItem {
3340 label: "fullyQualifiedName?".into(),
3341 insert_text: Some("fullyQualifiedName".into()),
3342 ..Default::default()
3343 },
3344 ])))
3345 })
3346 .next()
3347 .await;
3348 let completions = completions
3349 .await
3350 .unwrap()
3351 .into_iter()
3352 .flat_map(|response| response.completions)
3353 .collect::<Vec<_>>();
3354 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
3355 assert_eq!(completions.len(), 1);
3356 assert_eq!(completions[0].new_text, "fullyQualifiedName");
3357 assert_eq!(
3358 completions[0].replace_range.to_offset(&snapshot),
3359 text.len() - 3..text.len()
3360 );
3361
3362 // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
3363 let text = "let a = \"atoms/cmp\"";
3364 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3365 let completions = project.update(cx, |project, cx| {
3366 project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
3367 });
3368
3369 fake_server
3370 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3371 Ok(Some(lsp::CompletionResponse::Array(vec![
3372 lsp::CompletionItem {
3373 label: "component".into(),
3374 ..Default::default()
3375 },
3376 ])))
3377 })
3378 .next()
3379 .await;
3380 let completions = completions
3381 .await
3382 .unwrap()
3383 .into_iter()
3384 .flat_map(|response| response.completions)
3385 .collect::<Vec<_>>();
3386 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
3387 assert_eq!(completions.len(), 1);
3388 assert_eq!(completions[0].new_text, "component");
3389 assert_eq!(
3390 completions[0].replace_range.to_offset(&snapshot),
3391 text.len() - 4..text.len() - 1
3392 );
3393}
3394
3395#[gpui::test]
3396async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
3397 init_test(cx);
3398
3399 let fs = FakeFs::new(cx.executor());
3400 fs.insert_tree(
3401 path!("/dir"),
3402 json!({
3403 "a.ts": "",
3404 }),
3405 )
3406 .await;
3407
3408 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3409
3410 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3411 language_registry.add(typescript_lang());
3412 let mut fake_language_servers = language_registry.register_fake_lsp(
3413 "TypeScript",
3414 FakeLspAdapter {
3415 capabilities: lsp::ServerCapabilities {
3416 completion_provider: Some(lsp::CompletionOptions {
3417 trigger_characters: Some(vec![":".to_string()]),
3418 ..Default::default()
3419 }),
3420 ..Default::default()
3421 },
3422 ..Default::default()
3423 },
3424 );
3425
3426 let (buffer, _handle) = project
3427 .update(cx, |p, cx| {
3428 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3429 })
3430 .await
3431 .unwrap();
3432
3433 let fake_server = fake_language_servers.next().await.unwrap();
3434
3435 let text = "let a = b.fqn";
3436 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3437 let completions = project.update(cx, |project, cx| {
3438 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3439 });
3440
3441 fake_server
3442 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3443 Ok(Some(lsp::CompletionResponse::Array(vec![
3444 lsp::CompletionItem {
3445 label: "fullyQualifiedName?".into(),
3446 insert_text: Some("fully\rQualified\r\nName".into()),
3447 ..Default::default()
3448 },
3449 ])))
3450 })
3451 .next()
3452 .await;
3453 let completions = completions
3454 .await
3455 .unwrap()
3456 .into_iter()
3457 .flat_map(|response| response.completions)
3458 .collect::<Vec<_>>();
3459 assert_eq!(completions.len(), 1);
3460 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
3461}
3462
3463#[gpui::test(iterations = 10)]
3464async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
3465 init_test(cx);
3466
3467 let fs = FakeFs::new(cx.executor());
3468 fs.insert_tree(
3469 path!("/dir"),
3470 json!({
3471 "a.ts": "a",
3472 }),
3473 )
3474 .await;
3475
3476 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3477
3478 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3479 language_registry.add(typescript_lang());
3480 let mut fake_language_servers = language_registry.register_fake_lsp(
3481 "TypeScript",
3482 FakeLspAdapter {
3483 capabilities: lsp::ServerCapabilities {
3484 code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
3485 lsp::CodeActionOptions {
3486 resolve_provider: Some(true),
3487 ..lsp::CodeActionOptions::default()
3488 },
3489 )),
3490 execute_command_provider: Some(lsp::ExecuteCommandOptions {
3491 commands: vec!["_the/command".to_string()],
3492 ..lsp::ExecuteCommandOptions::default()
3493 }),
3494 ..lsp::ServerCapabilities::default()
3495 },
3496 ..FakeLspAdapter::default()
3497 },
3498 );
3499
3500 let (buffer, _handle) = project
3501 .update(cx, |p, cx| {
3502 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3503 })
3504 .await
3505 .unwrap();
3506
3507 let fake_server = fake_language_servers.next().await.unwrap();
3508
3509 // Language server returns code actions that contain commands, and not edits.
3510 let actions = project.update(cx, |project, cx| {
3511 project.code_actions(&buffer, 0..0, None, cx)
3512 });
3513 fake_server
3514 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
3515 Ok(Some(vec![
3516 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
3517 title: "The code action".into(),
3518 data: Some(serde_json::json!({
3519 "command": "_the/command",
3520 })),
3521 ..lsp::CodeAction::default()
3522 }),
3523 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
3524 title: "two".into(),
3525 ..lsp::CodeAction::default()
3526 }),
3527 ]))
3528 })
3529 .next()
3530 .await;
3531
3532 let action = actions.await.unwrap().unwrap()[0].clone();
3533 let apply = project.update(cx, |project, cx| {
3534 project.apply_code_action(buffer.clone(), action, true, cx)
3535 });
3536
3537 // Resolving the code action does not populate its edits. In absence of
3538 // edits, we must execute the given command.
3539 fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
3540 |mut action, _| async move {
3541 if action.data.is_some() {
3542 action.command = Some(lsp::Command {
3543 title: "The command".into(),
3544 command: "_the/command".into(),
3545 arguments: Some(vec![json!("the-argument")]),
3546 });
3547 }
3548 Ok(action)
3549 },
3550 );
3551
3552 // While executing the command, the language server sends the editor
3553 // a `workspaceEdit` request.
3554 fake_server
3555 .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
3556 let fake = fake_server.clone();
3557 move |params, _| {
3558 assert_eq!(params.command, "_the/command");
3559 let fake = fake.clone();
3560 async move {
3561 fake.server
3562 .request::<lsp::request::ApplyWorkspaceEdit>(
3563 lsp::ApplyWorkspaceEditParams {
3564 label: None,
3565 edit: lsp::WorkspaceEdit {
3566 changes: Some(
3567 [(
3568 lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
3569 vec![lsp::TextEdit {
3570 range: lsp::Range::new(
3571 lsp::Position::new(0, 0),
3572 lsp::Position::new(0, 0),
3573 ),
3574 new_text: "X".into(),
3575 }],
3576 )]
3577 .into_iter()
3578 .collect(),
3579 ),
3580 ..Default::default()
3581 },
3582 },
3583 )
3584 .await
3585 .into_response()
3586 .unwrap();
3587 Ok(Some(json!(null)))
3588 }
3589 }
3590 })
3591 .next()
3592 .await;
3593
3594 // Applying the code action returns a project transaction containing the edits
3595 // sent by the language server in its `workspaceEdit` request.
3596 let transaction = apply.await.unwrap();
3597 assert!(transaction.0.contains_key(&buffer));
3598 buffer.update(cx, |buffer, cx| {
3599 assert_eq!(buffer.text(), "Xa");
3600 buffer.undo(cx);
3601 assert_eq!(buffer.text(), "a");
3602 });
3603}
3604
3605#[gpui::test(iterations = 10)]
3606async fn test_save_file(cx: &mut gpui::TestAppContext) {
3607 init_test(cx);
3608
3609 let fs = FakeFs::new(cx.executor());
3610 fs.insert_tree(
3611 path!("/dir"),
3612 json!({
3613 "file1": "the old contents",
3614 }),
3615 )
3616 .await;
3617
3618 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3619 let buffer = project
3620 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3621 .await
3622 .unwrap();
3623 buffer.update(cx, |buffer, cx| {
3624 assert_eq!(buffer.text(), "the old contents");
3625 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3626 });
3627
3628 project
3629 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3630 .await
3631 .unwrap();
3632
3633 let new_text = fs
3634 .load(Path::new(path!("/dir/file1")))
3635 .await
3636 .unwrap()
3637 .replace("\r\n", "\n");
3638 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3639}
3640
3641#[gpui::test(iterations = 10)]
3642async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
3643 // Issue: #24349
3644 init_test(cx);
3645
3646 let fs = FakeFs::new(cx.executor());
3647 fs.insert_tree(path!("/dir"), json!({})).await;
3648
3649 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3650 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3651
3652 language_registry.add(rust_lang());
3653 let mut fake_rust_servers = language_registry.register_fake_lsp(
3654 "Rust",
3655 FakeLspAdapter {
3656 name: "the-rust-language-server",
3657 capabilities: lsp::ServerCapabilities {
3658 completion_provider: Some(lsp::CompletionOptions {
3659 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
3660 ..Default::default()
3661 }),
3662 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
3663 lsp::TextDocumentSyncOptions {
3664 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
3665 ..Default::default()
3666 },
3667 )),
3668 ..Default::default()
3669 },
3670 ..Default::default()
3671 },
3672 );
3673
3674 let buffer = project
3675 .update(cx, |this, cx| this.create_buffer(cx))
3676 .unwrap()
3677 .await;
3678 project.update(cx, |this, cx| {
3679 this.register_buffer_with_language_servers(&buffer, cx);
3680 buffer.update(cx, |buffer, cx| {
3681 assert!(!this.has_language_servers_for(buffer, cx));
3682 })
3683 });
3684
3685 project
3686 .update(cx, |this, cx| {
3687 let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
3688 this.save_buffer_as(
3689 buffer.clone(),
3690 ProjectPath {
3691 worktree_id,
3692 path: Arc::from("file.rs".as_ref()),
3693 },
3694 cx,
3695 )
3696 })
3697 .await
3698 .unwrap();
3699 // A server is started up, and it is notified about Rust files.
3700 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
3701 assert_eq!(
3702 fake_rust_server
3703 .receive_notification::<lsp::notification::DidOpenTextDocument>()
3704 .await
3705 .text_document,
3706 lsp::TextDocumentItem {
3707 uri: lsp::Url::from_file_path(path!("/dir/file.rs")).unwrap(),
3708 version: 0,
3709 text: "".to_string(),
3710 language_id: "rust".to_string(),
3711 }
3712 );
3713
3714 project.update(cx, |this, cx| {
3715 buffer.update(cx, |buffer, cx| {
3716 assert!(this.has_language_servers_for(buffer, cx));
3717 })
3718 });
3719}
3720
3721#[gpui::test(iterations = 30)]
3722async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3723 init_test(cx);
3724
3725 let fs = FakeFs::new(cx.executor());
3726 fs.insert_tree(
3727 path!("/dir"),
3728 json!({
3729 "file1": "the original contents",
3730 }),
3731 )
3732 .await;
3733
3734 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3735 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3736 let buffer = project
3737 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3738 .await
3739 .unwrap();
3740
3741 // Simulate buffer diffs being slow, so that they don't complete before
3742 // the next file change occurs.
3743 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3744
3745 // Change the buffer's file on disk, and then wait for the file change
3746 // to be detected by the worktree, so that the buffer starts reloading.
3747 fs.save(
3748 path!("/dir/file1").as_ref(),
3749 &"the first contents".into(),
3750 Default::default(),
3751 )
3752 .await
3753 .unwrap();
3754 worktree.next_event(cx).await;
3755
3756 // Change the buffer's file again. Depending on the random seed, the
3757 // previous file change may still be in progress.
3758 fs.save(
3759 path!("/dir/file1").as_ref(),
3760 &"the second contents".into(),
3761 Default::default(),
3762 )
3763 .await
3764 .unwrap();
3765 worktree.next_event(cx).await;
3766
3767 cx.executor().run_until_parked();
3768 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3769 buffer.read_with(cx, |buffer, _| {
3770 assert_eq!(buffer.text(), on_disk_text);
3771 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3772 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3773 });
3774}
3775
3776#[gpui::test(iterations = 30)]
3777async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3778 init_test(cx);
3779
3780 let fs = FakeFs::new(cx.executor());
3781 fs.insert_tree(
3782 path!("/dir"),
3783 json!({
3784 "file1": "the original contents",
3785 }),
3786 )
3787 .await;
3788
3789 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3790 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3791 let buffer = project
3792 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3793 .await
3794 .unwrap();
3795
3796 // Simulate buffer diffs being slow, so that they don't complete before
3797 // the next file change occurs.
3798 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3799
3800 // Change the buffer's file on disk, and then wait for the file change
3801 // to be detected by the worktree, so that the buffer starts reloading.
3802 fs.save(
3803 path!("/dir/file1").as_ref(),
3804 &"the first contents".into(),
3805 Default::default(),
3806 )
3807 .await
3808 .unwrap();
3809 worktree.next_event(cx).await;
3810
3811 cx.executor()
3812 .spawn(cx.executor().simulate_random_delay())
3813 .await;
3814
3815 // Perform a noop edit, causing the buffer's version to increase.
3816 buffer.update(cx, |buffer, cx| {
3817 buffer.edit([(0..0, " ")], None, cx);
3818 buffer.undo(cx);
3819 });
3820
3821 cx.executor().run_until_parked();
3822 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3823 buffer.read_with(cx, |buffer, _| {
3824 let buffer_text = buffer.text();
3825 if buffer_text == on_disk_text {
3826 assert!(
3827 !buffer.is_dirty() && !buffer.has_conflict(),
3828 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3829 );
3830 }
3831 // If the file change occurred while the buffer was processing the first
3832 // change, the buffer will be in a conflicting state.
3833 else {
3834 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3835 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3836 }
3837 });
3838}
3839
3840#[gpui::test]
3841async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3842 init_test(cx);
3843
3844 let fs = FakeFs::new(cx.executor());
3845 fs.insert_tree(
3846 path!("/dir"),
3847 json!({
3848 "file1": "the old contents",
3849 }),
3850 )
3851 .await;
3852
3853 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3854 let buffer = project
3855 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3856 .await
3857 .unwrap();
3858 buffer.update(cx, |buffer, cx| {
3859 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3860 });
3861
3862 project
3863 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3864 .await
3865 .unwrap();
3866
3867 let new_text = fs
3868 .load(Path::new(path!("/dir/file1")))
3869 .await
3870 .unwrap()
3871 .replace("\r\n", "\n");
3872 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3873}
3874
3875#[gpui::test]
3876async fn test_save_as(cx: &mut gpui::TestAppContext) {
3877 init_test(cx);
3878
3879 let fs = FakeFs::new(cx.executor());
3880 fs.insert_tree("/dir", json!({})).await;
3881
3882 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3883
3884 let languages = project.update(cx, |project, _| project.languages().clone());
3885 languages.add(rust_lang());
3886
3887 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3888 buffer.update(cx, |buffer, cx| {
3889 buffer.edit([(0..0, "abc")], None, cx);
3890 assert!(buffer.is_dirty());
3891 assert!(!buffer.has_conflict());
3892 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3893 });
3894 project
3895 .update(cx, |project, cx| {
3896 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3897 let path = ProjectPath {
3898 worktree_id,
3899 path: Arc::from(Path::new("file1.rs")),
3900 };
3901 project.save_buffer_as(buffer.clone(), path, cx)
3902 })
3903 .await
3904 .unwrap();
3905 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3906
3907 cx.executor().run_until_parked();
3908 buffer.update(cx, |buffer, cx| {
3909 assert_eq!(
3910 buffer.file().unwrap().full_path(cx),
3911 Path::new("dir/file1.rs")
3912 );
3913 assert!(!buffer.is_dirty());
3914 assert!(!buffer.has_conflict());
3915 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3916 });
3917
3918 let opened_buffer = project
3919 .update(cx, |project, cx| {
3920 project.open_local_buffer("/dir/file1.rs", cx)
3921 })
3922 .await
3923 .unwrap();
3924 assert_eq!(opened_buffer, buffer);
3925}
3926
3927#[gpui::test(retries = 5)]
3928async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
3929 use worktree::WorktreeModelHandle as _;
3930
3931 init_test(cx);
3932 cx.executor().allow_parking();
3933
3934 let dir = TempTree::new(json!({
3935 "a": {
3936 "file1": "",
3937 "file2": "",
3938 "file3": "",
3939 },
3940 "b": {
3941 "c": {
3942 "file4": "",
3943 "file5": "",
3944 }
3945 }
3946 }));
3947
3948 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
3949
3950 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3951 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
3952 async move { buffer.await.unwrap() }
3953 };
3954 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3955 project.update(cx, |project, cx| {
3956 let tree = project.worktrees(cx).next().unwrap();
3957 tree.read(cx)
3958 .entry_for_path(path)
3959 .unwrap_or_else(|| panic!("no entry for path {}", path))
3960 .id
3961 })
3962 };
3963
3964 let buffer2 = buffer_for_path("a/file2", cx).await;
3965 let buffer3 = buffer_for_path("a/file3", cx).await;
3966 let buffer4 = buffer_for_path("b/c/file4", cx).await;
3967 let buffer5 = buffer_for_path("b/c/file5", cx).await;
3968
3969 let file2_id = id_for_path("a/file2", cx);
3970 let file3_id = id_for_path("a/file3", cx);
3971 let file4_id = id_for_path("b/c/file4", cx);
3972
3973 // Create a remote copy of this worktree.
3974 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3975 let metadata = tree.update(cx, |tree, _| tree.metadata_proto());
3976
3977 let updates = Arc::new(Mutex::new(Vec::new()));
3978 tree.update(cx, |tree, cx| {
3979 let updates = updates.clone();
3980 tree.observe_updates(0, cx, move |update| {
3981 updates.lock().push(update);
3982 async { true }
3983 });
3984 });
3985
3986 let remote =
3987 cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));
3988
3989 cx.executor().run_until_parked();
3990
3991 cx.update(|cx| {
3992 assert!(!buffer2.read(cx).is_dirty());
3993 assert!(!buffer3.read(cx).is_dirty());
3994 assert!(!buffer4.read(cx).is_dirty());
3995 assert!(!buffer5.read(cx).is_dirty());
3996 });
3997
3998 // Rename and delete files and directories.
3999 tree.flush_fs_events(cx).await;
4000 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4001 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4002 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4003 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4004 tree.flush_fs_events(cx).await;
4005
4006 cx.update(|app| {
4007 assert_eq!(
4008 tree.read(app)
4009 .paths()
4010 .map(|p| p.to_str().unwrap())
4011 .collect::<Vec<_>>(),
4012 vec![
4013 "a",
4014 path!("a/file1"),
4015 path!("a/file2.new"),
4016 "b",
4017 "d",
4018 path!("d/file3"),
4019 path!("d/file4"),
4020 ]
4021 );
4022 });
4023
4024 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
4025 assert_eq!(id_for_path("d/file3", cx), file3_id);
4026 assert_eq!(id_for_path("d/file4", cx), file4_id);
4027
4028 cx.update(|cx| {
4029 assert_eq!(
4030 buffer2.read(cx).file().unwrap().path().as_ref(),
4031 Path::new("a/file2.new")
4032 );
4033 assert_eq!(
4034 buffer3.read(cx).file().unwrap().path().as_ref(),
4035 Path::new("d/file3")
4036 );
4037 assert_eq!(
4038 buffer4.read(cx).file().unwrap().path().as_ref(),
4039 Path::new("d/file4")
4040 );
4041 assert_eq!(
4042 buffer5.read(cx).file().unwrap().path().as_ref(),
4043 Path::new("b/c/file5")
4044 );
4045
4046 assert_matches!(
4047 buffer2.read(cx).file().unwrap().disk_state(),
4048 DiskState::Present { .. }
4049 );
4050 assert_matches!(
4051 buffer3.read(cx).file().unwrap().disk_state(),
4052 DiskState::Present { .. }
4053 );
4054 assert_matches!(
4055 buffer4.read(cx).file().unwrap().disk_state(),
4056 DiskState::Present { .. }
4057 );
4058 assert_eq!(
4059 buffer5.read(cx).file().unwrap().disk_state(),
4060 DiskState::Deleted
4061 );
4062 });
4063
4064 // Update the remote worktree. Check that it becomes consistent with the
4065 // local worktree.
4066 cx.executor().run_until_parked();
4067
4068 remote.update(cx, |remote, _| {
4069 for update in updates.lock().drain(..) {
4070 remote.as_remote_mut().unwrap().update_from_remote(update);
4071 }
4072 });
4073 cx.executor().run_until_parked();
4074 remote.update(cx, |remote, _| {
4075 assert_eq!(
4076 remote
4077 .paths()
4078 .map(|p| p.to_str().unwrap())
4079 .collect::<Vec<_>>(),
4080 vec![
4081 "a",
4082 path!("a/file1"),
4083 path!("a/file2.new"),
4084 "b",
4085 "d",
4086 path!("d/file3"),
4087 path!("d/file4"),
4088 ]
4089 );
4090 });
4091}
4092
4093#[gpui::test(iterations = 10)]
4094async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4095 init_test(cx);
4096
4097 let fs = FakeFs::new(cx.executor());
4098 fs.insert_tree(
4099 path!("/dir"),
4100 json!({
4101 "a": {
4102 "file1": "",
4103 }
4104 }),
4105 )
4106 .await;
4107
4108 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4109 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4110 let tree_id = tree.update(cx, |tree, _| tree.id());
4111
4112 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4113 project.update(cx, |project, cx| {
4114 let tree = project.worktrees(cx).next().unwrap();
4115 tree.read(cx)
4116 .entry_for_path(path)
4117 .unwrap_or_else(|| panic!("no entry for path {}", path))
4118 .id
4119 })
4120 };
4121
4122 let dir_id = id_for_path("a", cx);
4123 let file_id = id_for_path("a/file1", cx);
4124 let buffer = project
4125 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
4126 .await
4127 .unwrap();
4128 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4129
4130 project
4131 .update(cx, |project, cx| {
4132 project.rename_entry(dir_id, Path::new("b"), cx)
4133 })
4134 .unwrap()
4135 .await
4136 .into_included()
4137 .unwrap();
4138 cx.executor().run_until_parked();
4139
4140 assert_eq!(id_for_path("b", cx), dir_id);
4141 assert_eq!(id_for_path("b/file1", cx), file_id);
4142 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4143}
4144
4145#[gpui::test]
4146async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4147 init_test(cx);
4148
4149 let fs = FakeFs::new(cx.executor());
4150 fs.insert_tree(
4151 "/dir",
4152 json!({
4153 "a.txt": "a-contents",
4154 "b.txt": "b-contents",
4155 }),
4156 )
4157 .await;
4158
4159 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4160
4161 // Spawn multiple tasks to open paths, repeating some paths.
4162 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4163 (
4164 p.open_local_buffer("/dir/a.txt", cx),
4165 p.open_local_buffer("/dir/b.txt", cx),
4166 p.open_local_buffer("/dir/a.txt", cx),
4167 )
4168 });
4169
4170 let buffer_a_1 = buffer_a_1.await.unwrap();
4171 let buffer_a_2 = buffer_a_2.await.unwrap();
4172 let buffer_b = buffer_b.await.unwrap();
4173 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4174 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4175
4176 // There is only one buffer per path.
4177 let buffer_a_id = buffer_a_1.entity_id();
4178 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4179
4180 // Open the same path again while it is still open.
4181 drop(buffer_a_1);
4182 let buffer_a_3 = project
4183 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4184 .await
4185 .unwrap();
4186
4187 // There's still only one buffer per path.
4188 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4189}
4190
4191#[gpui::test]
4192async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4193 init_test(cx);
4194
4195 let fs = FakeFs::new(cx.executor());
4196 fs.insert_tree(
4197 path!("/dir"),
4198 json!({
4199 "file1": "abc",
4200 "file2": "def",
4201 "file3": "ghi",
4202 }),
4203 )
4204 .await;
4205
4206 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4207
4208 let buffer1 = project
4209 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4210 .await
4211 .unwrap();
4212 let events = Arc::new(Mutex::new(Vec::new()));
4213
4214 // initially, the buffer isn't dirty.
4215 buffer1.update(cx, |buffer, cx| {
4216 cx.subscribe(&buffer1, {
4217 let events = events.clone();
4218 move |_, _, event, _| match event {
4219 BufferEvent::Operation { .. } => {}
4220 _ => events.lock().push(event.clone()),
4221 }
4222 })
4223 .detach();
4224
4225 assert!(!buffer.is_dirty());
4226 assert!(events.lock().is_empty());
4227
4228 buffer.edit([(1..2, "")], None, cx);
4229 });
4230
4231 // after the first edit, the buffer is dirty, and emits a dirtied event.
4232 buffer1.update(cx, |buffer, cx| {
4233 assert!(buffer.text() == "ac");
4234 assert!(buffer.is_dirty());
4235 assert_eq!(
4236 *events.lock(),
4237 &[
4238 language::BufferEvent::Edited,
4239 language::BufferEvent::DirtyChanged
4240 ]
4241 );
4242 events.lock().clear();
4243 buffer.did_save(
4244 buffer.version(),
4245 buffer.file().unwrap().disk_state().mtime(),
4246 cx,
4247 );
4248 });
4249
4250 // after saving, the buffer is not dirty, and emits a saved event.
4251 buffer1.update(cx, |buffer, cx| {
4252 assert!(!buffer.is_dirty());
4253 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4254 events.lock().clear();
4255
4256 buffer.edit([(1..1, "B")], None, cx);
4257 buffer.edit([(2..2, "D")], None, cx);
4258 });
4259
4260 // after editing again, the buffer is dirty, and emits another dirty event.
4261 buffer1.update(cx, |buffer, cx| {
4262 assert!(buffer.text() == "aBDc");
4263 assert!(buffer.is_dirty());
4264 assert_eq!(
4265 *events.lock(),
4266 &[
4267 language::BufferEvent::Edited,
4268 language::BufferEvent::DirtyChanged,
4269 language::BufferEvent::Edited,
4270 ],
4271 );
4272 events.lock().clear();
4273
4274 // After restoring the buffer to its previously-saved state,
4275 // the buffer is not considered dirty anymore.
4276 buffer.edit([(1..3, "")], None, cx);
4277 assert!(buffer.text() == "ac");
4278 assert!(!buffer.is_dirty());
4279 });
4280
4281 assert_eq!(
4282 *events.lock(),
4283 &[
4284 language::BufferEvent::Edited,
4285 language::BufferEvent::DirtyChanged
4286 ]
4287 );
4288
4289 // When a file is deleted, it is not considered dirty.
4290 let events = Arc::new(Mutex::new(Vec::new()));
4291 let buffer2 = project
4292 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4293 .await
4294 .unwrap();
4295 buffer2.update(cx, |_, cx| {
4296 cx.subscribe(&buffer2, {
4297 let events = events.clone();
4298 move |_, _, event, _| match event {
4299 BufferEvent::Operation { .. } => {}
4300 _ => events.lock().push(event.clone()),
4301 }
4302 })
4303 .detach();
4304 });
4305
4306 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4307 .await
4308 .unwrap();
4309 cx.executor().run_until_parked();
4310 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4311 assert_eq!(
4312 mem::take(&mut *events.lock()),
4313 &[language::BufferEvent::FileHandleChanged]
4314 );
4315
4316 // Buffer becomes dirty when edited.
4317 buffer2.update(cx, |buffer, cx| {
4318 buffer.edit([(2..3, "")], None, cx);
4319 assert_eq!(buffer.is_dirty(), true);
4320 });
4321 assert_eq!(
4322 mem::take(&mut *events.lock()),
4323 &[
4324 language::BufferEvent::Edited,
4325 language::BufferEvent::DirtyChanged
4326 ]
4327 );
4328
4329 // Buffer becomes clean again when all of its content is removed, because
4330 // the file was deleted.
4331 buffer2.update(cx, |buffer, cx| {
4332 buffer.edit([(0..2, "")], None, cx);
4333 assert_eq!(buffer.is_empty(), true);
4334 assert_eq!(buffer.is_dirty(), false);
4335 });
4336 assert_eq!(
4337 *events.lock(),
4338 &[
4339 language::BufferEvent::Edited,
4340 language::BufferEvent::DirtyChanged
4341 ]
4342 );
4343
4344 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4345 let events = Arc::new(Mutex::new(Vec::new()));
4346 let buffer3 = project
4347 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4348 .await
4349 .unwrap();
4350 buffer3.update(cx, |_, cx| {
4351 cx.subscribe(&buffer3, {
4352 let events = events.clone();
4353 move |_, _, event, _| match event {
4354 BufferEvent::Operation { .. } => {}
4355 _ => events.lock().push(event.clone()),
4356 }
4357 })
4358 .detach();
4359 });
4360
4361 buffer3.update(cx, |buffer, cx| {
4362 buffer.edit([(0..0, "x")], None, cx);
4363 });
4364 events.lock().clear();
4365 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4366 .await
4367 .unwrap();
4368 cx.executor().run_until_parked();
4369 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4370 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4371}
4372
4373#[gpui::test]
4374async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
4375 init_test(cx);
4376
4377 let (initial_contents, initial_offsets) =
4378 marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
4379 let fs = FakeFs::new(cx.executor());
4380 fs.insert_tree(
4381 path!("/dir"),
4382 json!({
4383 "the-file": initial_contents,
4384 }),
4385 )
4386 .await;
4387 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4388 let buffer = project
4389 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
4390 .await
4391 .unwrap();
4392
4393 let anchors = initial_offsets
4394 .iter()
4395 .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
4396 .collect::<Vec<_>>();
4397
4398 // Change the file on disk, adding two new lines of text, and removing
4399 // one line.
4400 buffer.update(cx, |buffer, _| {
4401 assert!(!buffer.is_dirty());
4402 assert!(!buffer.has_conflict());
4403 });
4404
4405 let (new_contents, new_offsets) =
4406 marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
4407 fs.save(
4408 path!("/dir/the-file").as_ref(),
4409 &new_contents.as_str().into(),
4410 LineEnding::Unix,
4411 )
4412 .await
4413 .unwrap();
4414
4415 // Because the buffer was not modified, it is reloaded from disk. Its
4416 // contents are edited according to the diff between the old and new
4417 // file contents.
4418 cx.executor().run_until_parked();
4419 buffer.update(cx, |buffer, _| {
4420 assert_eq!(buffer.text(), new_contents);
4421 assert!(!buffer.is_dirty());
4422 assert!(!buffer.has_conflict());
4423
4424 let anchor_offsets = anchors
4425 .iter()
4426 .map(|anchor| anchor.to_offset(&*buffer))
4427 .collect::<Vec<_>>();
4428 assert_eq!(anchor_offsets, new_offsets);
4429 });
4430
4431 // Modify the buffer
4432 buffer.update(cx, |buffer, cx| {
4433 buffer.edit([(0..0, " ")], None, cx);
4434 assert!(buffer.is_dirty());
4435 assert!(!buffer.has_conflict());
4436 });
4437
4438 // Change the file on disk again, adding blank lines to the beginning.
4439 fs.save(
4440 path!("/dir/the-file").as_ref(),
4441 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
4442 LineEnding::Unix,
4443 )
4444 .await
4445 .unwrap();
4446
4447 // Because the buffer is modified, it doesn't reload from disk, but is
4448 // marked as having a conflict.
4449 cx.executor().run_until_parked();
4450 buffer.update(cx, |buffer, _| {
4451 assert_eq!(buffer.text(), " ".to_string() + &new_contents);
4452 assert!(buffer.has_conflict());
4453 });
4454}
4455
4456#[gpui::test]
4457async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4458 init_test(cx);
4459
4460 let fs = FakeFs::new(cx.executor());
4461 fs.insert_tree(
4462 path!("/dir"),
4463 json!({
4464 "file1": "a\nb\nc\n",
4465 "file2": "one\r\ntwo\r\nthree\r\n",
4466 }),
4467 )
4468 .await;
4469
4470 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4471 let buffer1 = project
4472 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4473 .await
4474 .unwrap();
4475 let buffer2 = project
4476 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4477 .await
4478 .unwrap();
4479
4480 buffer1.update(cx, |buffer, _| {
4481 assert_eq!(buffer.text(), "a\nb\nc\n");
4482 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4483 });
4484 buffer2.update(cx, |buffer, _| {
4485 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4486 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4487 });
4488
4489 // Change a file's line endings on disk from unix to windows. The buffer's
4490 // state updates correctly.
4491 fs.save(
4492 path!("/dir/file1").as_ref(),
4493 &"aaa\nb\nc\n".into(),
4494 LineEnding::Windows,
4495 )
4496 .await
4497 .unwrap();
4498 cx.executor().run_until_parked();
4499 buffer1.update(cx, |buffer, _| {
4500 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4501 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4502 });
4503
4504 // Save a file with windows line endings. The file is written correctly.
4505 buffer2.update(cx, |buffer, cx| {
4506 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4507 });
4508 project
4509 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4510 .await
4511 .unwrap();
4512 assert_eq!(
4513 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4514 "one\r\ntwo\r\nthree\r\nfour\r\n",
4515 );
4516}
4517
4518#[gpui::test]
4519async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4520 init_test(cx);
4521
4522 let fs = FakeFs::new(cx.executor());
4523 fs.insert_tree(
4524 path!("/dir"),
4525 json!({
4526 "a.rs": "
4527 fn foo(mut v: Vec<usize>) {
4528 for x in &v {
4529 v.push(1);
4530 }
4531 }
4532 "
4533 .unindent(),
4534 }),
4535 )
4536 .await;
4537
4538 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4539 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
4540 let buffer = project
4541 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
4542 .await
4543 .unwrap();
4544
4545 let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
4546 let message = lsp::PublishDiagnosticsParams {
4547 uri: buffer_uri.clone(),
4548 diagnostics: vec![
4549 lsp::Diagnostic {
4550 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4551 severity: Some(DiagnosticSeverity::WARNING),
4552 message: "error 1".to_string(),
4553 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4554 location: lsp::Location {
4555 uri: buffer_uri.clone(),
4556 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4557 },
4558 message: "error 1 hint 1".to_string(),
4559 }]),
4560 ..Default::default()
4561 },
4562 lsp::Diagnostic {
4563 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4564 severity: Some(DiagnosticSeverity::HINT),
4565 message: "error 1 hint 1".to_string(),
4566 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4567 location: lsp::Location {
4568 uri: buffer_uri.clone(),
4569 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4570 },
4571 message: "original diagnostic".to_string(),
4572 }]),
4573 ..Default::default()
4574 },
4575 lsp::Diagnostic {
4576 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4577 severity: Some(DiagnosticSeverity::ERROR),
4578 message: "error 2".to_string(),
4579 related_information: Some(vec![
4580 lsp::DiagnosticRelatedInformation {
4581 location: lsp::Location {
4582 uri: buffer_uri.clone(),
4583 range: lsp::Range::new(
4584 lsp::Position::new(1, 13),
4585 lsp::Position::new(1, 15),
4586 ),
4587 },
4588 message: "error 2 hint 1".to_string(),
4589 },
4590 lsp::DiagnosticRelatedInformation {
4591 location: lsp::Location {
4592 uri: buffer_uri.clone(),
4593 range: lsp::Range::new(
4594 lsp::Position::new(1, 13),
4595 lsp::Position::new(1, 15),
4596 ),
4597 },
4598 message: "error 2 hint 2".to_string(),
4599 },
4600 ]),
4601 ..Default::default()
4602 },
4603 lsp::Diagnostic {
4604 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4605 severity: Some(DiagnosticSeverity::HINT),
4606 message: "error 2 hint 1".to_string(),
4607 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4608 location: lsp::Location {
4609 uri: buffer_uri.clone(),
4610 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4611 },
4612 message: "original diagnostic".to_string(),
4613 }]),
4614 ..Default::default()
4615 },
4616 lsp::Diagnostic {
4617 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4618 severity: Some(DiagnosticSeverity::HINT),
4619 message: "error 2 hint 2".to_string(),
4620 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4621 location: lsp::Location {
4622 uri: buffer_uri,
4623 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4624 },
4625 message: "original diagnostic".to_string(),
4626 }]),
4627 ..Default::default()
4628 },
4629 ],
4630 version: None,
4631 };
4632
4633 lsp_store
4634 .update(cx, |lsp_store, cx| {
4635 lsp_store.update_diagnostics(
4636 LanguageServerId(0),
4637 message,
4638 None,
4639 DiagnosticSourceKind::Pushed,
4640 &[],
4641 cx,
4642 )
4643 })
4644 .unwrap();
4645 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
4646
4647 assert_eq!(
4648 buffer
4649 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
4650 .collect::<Vec<_>>(),
4651 &[
4652 DiagnosticEntry {
4653 range: Point::new(1, 8)..Point::new(1, 9),
4654 diagnostic: Diagnostic {
4655 severity: DiagnosticSeverity::WARNING,
4656 message: "error 1".to_string(),
4657 group_id: 1,
4658 is_primary: true,
4659 source_kind: DiagnosticSourceKind::Pushed,
4660 ..Diagnostic::default()
4661 }
4662 },
4663 DiagnosticEntry {
4664 range: Point::new(1, 8)..Point::new(1, 9),
4665 diagnostic: Diagnostic {
4666 severity: DiagnosticSeverity::HINT,
4667 message: "error 1 hint 1".to_string(),
4668 group_id: 1,
4669 is_primary: false,
4670 source_kind: DiagnosticSourceKind::Pushed,
4671 ..Diagnostic::default()
4672 }
4673 },
4674 DiagnosticEntry {
4675 range: Point::new(1, 13)..Point::new(1, 15),
4676 diagnostic: Diagnostic {
4677 severity: DiagnosticSeverity::HINT,
4678 message: "error 2 hint 1".to_string(),
4679 group_id: 0,
4680 is_primary: false,
4681 source_kind: DiagnosticSourceKind::Pushed,
4682 ..Diagnostic::default()
4683 }
4684 },
4685 DiagnosticEntry {
4686 range: Point::new(1, 13)..Point::new(1, 15),
4687 diagnostic: Diagnostic {
4688 severity: DiagnosticSeverity::HINT,
4689 message: "error 2 hint 2".to_string(),
4690 group_id: 0,
4691 is_primary: false,
4692 source_kind: DiagnosticSourceKind::Pushed,
4693 ..Diagnostic::default()
4694 }
4695 },
4696 DiagnosticEntry {
4697 range: Point::new(2, 8)..Point::new(2, 17),
4698 diagnostic: Diagnostic {
4699 severity: DiagnosticSeverity::ERROR,
4700 message: "error 2".to_string(),
4701 group_id: 0,
4702 is_primary: true,
4703 source_kind: DiagnosticSourceKind::Pushed,
4704 ..Diagnostic::default()
4705 }
4706 }
4707 ]
4708 );
4709
4710 assert_eq!(
4711 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4712 &[
4713 DiagnosticEntry {
4714 range: Point::new(1, 13)..Point::new(1, 15),
4715 diagnostic: Diagnostic {
4716 severity: DiagnosticSeverity::HINT,
4717 message: "error 2 hint 1".to_string(),
4718 group_id: 0,
4719 is_primary: false,
4720 source_kind: DiagnosticSourceKind::Pushed,
4721 ..Diagnostic::default()
4722 }
4723 },
4724 DiagnosticEntry {
4725 range: Point::new(1, 13)..Point::new(1, 15),
4726 diagnostic: Diagnostic {
4727 severity: DiagnosticSeverity::HINT,
4728 message: "error 2 hint 2".to_string(),
4729 group_id: 0,
4730 is_primary: false,
4731 source_kind: DiagnosticSourceKind::Pushed,
4732 ..Diagnostic::default()
4733 }
4734 },
4735 DiagnosticEntry {
4736 range: Point::new(2, 8)..Point::new(2, 17),
4737 diagnostic: Diagnostic {
4738 severity: DiagnosticSeverity::ERROR,
4739 message: "error 2".to_string(),
4740 group_id: 0,
4741 is_primary: true,
4742 source_kind: DiagnosticSourceKind::Pushed,
4743 ..Diagnostic::default()
4744 }
4745 }
4746 ]
4747 );
4748
4749 assert_eq!(
4750 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4751 &[
4752 DiagnosticEntry {
4753 range: Point::new(1, 8)..Point::new(1, 9),
4754 diagnostic: Diagnostic {
4755 severity: DiagnosticSeverity::WARNING,
4756 message: "error 1".to_string(),
4757 group_id: 1,
4758 is_primary: true,
4759 source_kind: DiagnosticSourceKind::Pushed,
4760 ..Diagnostic::default()
4761 }
4762 },
4763 DiagnosticEntry {
4764 range: Point::new(1, 8)..Point::new(1, 9),
4765 diagnostic: Diagnostic {
4766 severity: DiagnosticSeverity::HINT,
4767 message: "error 1 hint 1".to_string(),
4768 group_id: 1,
4769 is_primary: false,
4770 source_kind: DiagnosticSourceKind::Pushed,
4771 ..Diagnostic::default()
4772 }
4773 },
4774 ]
4775 );
4776}
4777
4778#[gpui::test]
4779async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
4780 init_test(cx);
4781
4782 let fs = FakeFs::new(cx.executor());
4783 fs.insert_tree(
4784 path!("/dir"),
4785 json!({
4786 "one.rs": "const ONE: usize = 1;",
4787 "two": {
4788 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4789 }
4790
4791 }),
4792 )
4793 .await;
4794 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4795
4796 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4797 language_registry.add(rust_lang());
4798 let watched_paths = lsp::FileOperationRegistrationOptions {
4799 filters: vec![
4800 FileOperationFilter {
4801 scheme: Some("file".to_owned()),
4802 pattern: lsp::FileOperationPattern {
4803 glob: "**/*.rs".to_owned(),
4804 matches: Some(lsp::FileOperationPatternKind::File),
4805 options: None,
4806 },
4807 },
4808 FileOperationFilter {
4809 scheme: Some("file".to_owned()),
4810 pattern: lsp::FileOperationPattern {
4811 glob: "**/**".to_owned(),
4812 matches: Some(lsp::FileOperationPatternKind::Folder),
4813 options: None,
4814 },
4815 },
4816 ],
4817 };
4818 let mut fake_servers = language_registry.register_fake_lsp(
4819 "Rust",
4820 FakeLspAdapter {
4821 capabilities: lsp::ServerCapabilities {
4822 workspace: Some(lsp::WorkspaceServerCapabilities {
4823 workspace_folders: None,
4824 file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
4825 did_rename: Some(watched_paths.clone()),
4826 will_rename: Some(watched_paths),
4827 ..Default::default()
4828 }),
4829 }),
4830 ..Default::default()
4831 },
4832 ..Default::default()
4833 },
4834 );
4835
4836 let _ = project
4837 .update(cx, |project, cx| {
4838 project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
4839 })
4840 .await
4841 .unwrap();
4842
4843 let fake_server = fake_servers.next().await.unwrap();
4844 let response = project.update(cx, |project, cx| {
4845 let worktree = project.worktrees(cx).next().unwrap();
4846 let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
4847 project.rename_entry(entry.id, "three.rs".as_ref(), cx)
4848 });
4849 let expected_edit = lsp::WorkspaceEdit {
4850 changes: None,
4851 document_changes: Some(DocumentChanges::Edits({
4852 vec![TextDocumentEdit {
4853 edits: vec![lsp::Edit::Plain(lsp::TextEdit {
4854 range: lsp::Range {
4855 start: lsp::Position {
4856 line: 0,
4857 character: 1,
4858 },
4859 end: lsp::Position {
4860 line: 0,
4861 character: 3,
4862 },
4863 },
4864 new_text: "This is not a drill".to_owned(),
4865 })],
4866 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
4867 uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
4868 version: Some(1337),
4869 },
4870 }]
4871 })),
4872 change_annotations: None,
4873 };
4874 let resolved_workspace_edit = Arc::new(OnceLock::new());
4875 fake_server
4876 .set_request_handler::<WillRenameFiles, _, _>({
4877 let resolved_workspace_edit = resolved_workspace_edit.clone();
4878 let expected_edit = expected_edit.clone();
4879 move |params, _| {
4880 let resolved_workspace_edit = resolved_workspace_edit.clone();
4881 let expected_edit = expected_edit.clone();
4882 async move {
4883 assert_eq!(params.files.len(), 1);
4884 assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
4885 assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
4886 resolved_workspace_edit.set(expected_edit.clone()).unwrap();
4887 Ok(Some(expected_edit))
4888 }
4889 }
4890 })
4891 .next()
4892 .await
4893 .unwrap();
4894 let _ = response.await.unwrap();
4895 fake_server
4896 .handle_notification::<DidRenameFiles, _>(|params, _| {
4897 assert_eq!(params.files.len(), 1);
4898 assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
4899 assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
4900 })
4901 .next()
4902 .await
4903 .unwrap();
4904 assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
4905}
4906
4907#[gpui::test]
4908async fn test_rename(cx: &mut gpui::TestAppContext) {
4909 // hi
4910 init_test(cx);
4911
4912 let fs = FakeFs::new(cx.executor());
4913 fs.insert_tree(
4914 path!("/dir"),
4915 json!({
4916 "one.rs": "const ONE: usize = 1;",
4917 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4918 }),
4919 )
4920 .await;
4921
4922 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4923
4924 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4925 language_registry.add(rust_lang());
4926 let mut fake_servers = language_registry.register_fake_lsp(
4927 "Rust",
4928 FakeLspAdapter {
4929 capabilities: lsp::ServerCapabilities {
4930 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
4931 prepare_provider: Some(true),
4932 work_done_progress_options: Default::default(),
4933 })),
4934 ..Default::default()
4935 },
4936 ..Default::default()
4937 },
4938 );
4939
4940 let (buffer, _handle) = project
4941 .update(cx, |project, cx| {
4942 project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
4943 })
4944 .await
4945 .unwrap();
4946
4947 let fake_server = fake_servers.next().await.unwrap();
4948
4949 let response = project.update(cx, |project, cx| {
4950 project.prepare_rename(buffer.clone(), 7, cx)
4951 });
4952 fake_server
4953 .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
4954 assert_eq!(
4955 params.text_document.uri.as_str(),
4956 uri!("file:///dir/one.rs")
4957 );
4958 assert_eq!(params.position, lsp::Position::new(0, 7));
4959 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4960 lsp::Position::new(0, 6),
4961 lsp::Position::new(0, 9),
4962 ))))
4963 })
4964 .next()
4965 .await
4966 .unwrap();
4967 let response = response.await.unwrap();
4968 let PrepareRenameResponse::Success(range) = response else {
4969 panic!("{:?}", response);
4970 };
4971 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
4972 assert_eq!(range, 6..9);
4973
4974 let response = project.update(cx, |project, cx| {
4975 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
4976 });
4977 fake_server
4978 .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
4979 assert_eq!(
4980 params.text_document_position.text_document.uri.as_str(),
4981 uri!("file:///dir/one.rs")
4982 );
4983 assert_eq!(
4984 params.text_document_position.position,
4985 lsp::Position::new(0, 7)
4986 );
4987 assert_eq!(params.new_name, "THREE");
4988 Ok(Some(lsp::WorkspaceEdit {
4989 changes: Some(
4990 [
4991 (
4992 lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
4993 vec![lsp::TextEdit::new(
4994 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
4995 "THREE".to_string(),
4996 )],
4997 ),
4998 (
4999 lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
5000 vec![
5001 lsp::TextEdit::new(
5002 lsp::Range::new(
5003 lsp::Position::new(0, 24),
5004 lsp::Position::new(0, 27),
5005 ),
5006 "THREE".to_string(),
5007 ),
5008 lsp::TextEdit::new(
5009 lsp::Range::new(
5010 lsp::Position::new(0, 35),
5011 lsp::Position::new(0, 38),
5012 ),
5013 "THREE".to_string(),
5014 ),
5015 ],
5016 ),
5017 ]
5018 .into_iter()
5019 .collect(),
5020 ),
5021 ..Default::default()
5022 }))
5023 })
5024 .next()
5025 .await
5026 .unwrap();
5027 let mut transaction = response.await.unwrap().0;
5028 assert_eq!(transaction.len(), 2);
5029 assert_eq!(
5030 transaction
5031 .remove_entry(&buffer)
5032 .unwrap()
5033 .0
5034 .update(cx, |buffer, _| buffer.text()),
5035 "const THREE: usize = 1;"
5036 );
5037 assert_eq!(
5038 transaction
5039 .into_keys()
5040 .next()
5041 .unwrap()
5042 .update(cx, |buffer, _| buffer.text()),
5043 "const TWO: usize = one::THREE + one::THREE;"
5044 );
5045}
5046
5047#[gpui::test]
5048async fn test_search(cx: &mut gpui::TestAppContext) {
5049 init_test(cx);
5050
5051 let fs = FakeFs::new(cx.executor());
5052 fs.insert_tree(
5053 path!("/dir"),
5054 json!({
5055 "one.rs": "const ONE: usize = 1;",
5056 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5057 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5058 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5059 }),
5060 )
5061 .await;
5062 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5063 assert_eq!(
5064 search(
5065 &project,
5066 SearchQuery::text(
5067 "TWO",
5068 false,
5069 true,
5070 false,
5071 Default::default(),
5072 Default::default(),
5073 false,
5074 None
5075 )
5076 .unwrap(),
5077 cx
5078 )
5079 .await
5080 .unwrap(),
5081 HashMap::from_iter([
5082 (path!("dir/two.rs").to_string(), vec![6..9]),
5083 (path!("dir/three.rs").to_string(), vec![37..40])
5084 ])
5085 );
5086
5087 let buffer_4 = project
5088 .update(cx, |project, cx| {
5089 project.open_local_buffer(path!("/dir/four.rs"), cx)
5090 })
5091 .await
5092 .unwrap();
5093 buffer_4.update(cx, |buffer, cx| {
5094 let text = "two::TWO";
5095 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5096 });
5097
5098 assert_eq!(
5099 search(
5100 &project,
5101 SearchQuery::text(
5102 "TWO",
5103 false,
5104 true,
5105 false,
5106 Default::default(),
5107 Default::default(),
5108 false,
5109 None,
5110 )
5111 .unwrap(),
5112 cx
5113 )
5114 .await
5115 .unwrap(),
5116 HashMap::from_iter([
5117 (path!("dir/two.rs").to_string(), vec![6..9]),
5118 (path!("dir/three.rs").to_string(), vec![37..40]),
5119 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5120 ])
5121 );
5122}
5123
5124#[gpui::test]
5125async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
5126 init_test(cx);
5127
5128 let search_query = "file";
5129
5130 let fs = FakeFs::new(cx.executor());
5131 fs.insert_tree(
5132 path!("/dir"),
5133 json!({
5134 "one.rs": r#"// Rust file one"#,
5135 "one.ts": r#"// TypeScript file one"#,
5136 "two.rs": r#"// Rust file two"#,
5137 "two.ts": r#"// TypeScript file two"#,
5138 }),
5139 )
5140 .await;
5141 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5142
5143 assert!(
5144 search(
5145 &project,
5146 SearchQuery::text(
5147 search_query,
5148 false,
5149 true,
5150 false,
5151 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5152 Default::default(),
5153 false,
5154 None
5155 )
5156 .unwrap(),
5157 cx
5158 )
5159 .await
5160 .unwrap()
5161 .is_empty(),
5162 "If no inclusions match, no files should be returned"
5163 );
5164
5165 assert_eq!(
5166 search(
5167 &project,
5168 SearchQuery::text(
5169 search_query,
5170 false,
5171 true,
5172 false,
5173 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5174 Default::default(),
5175 false,
5176 None
5177 )
5178 .unwrap(),
5179 cx
5180 )
5181 .await
5182 .unwrap(),
5183 HashMap::from_iter([
5184 (path!("dir/one.rs").to_string(), vec![8..12]),
5185 (path!("dir/two.rs").to_string(), vec![8..12]),
5186 ]),
5187 "Rust only search should give only Rust files"
5188 );
5189
5190 assert_eq!(
5191 search(
5192 &project,
5193 SearchQuery::text(
5194 search_query,
5195 false,
5196 true,
5197 false,
5198 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5199 Default::default(),
5200 false,
5201 None,
5202 )
5203 .unwrap(),
5204 cx
5205 )
5206 .await
5207 .unwrap(),
5208 HashMap::from_iter([
5209 (path!("dir/one.ts").to_string(), vec![14..18]),
5210 (path!("dir/two.ts").to_string(), vec![14..18]),
5211 ]),
5212 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
5213 );
5214
5215 assert_eq!(
5216 search(
5217 &project,
5218 SearchQuery::text(
5219 search_query,
5220 false,
5221 true,
5222 false,
5223 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5224 .unwrap(),
5225 Default::default(),
5226 false,
5227 None,
5228 )
5229 .unwrap(),
5230 cx
5231 )
5232 .await
5233 .unwrap(),
5234 HashMap::from_iter([
5235 (path!("dir/two.ts").to_string(), vec![14..18]),
5236 (path!("dir/one.rs").to_string(), vec![8..12]),
5237 (path!("dir/one.ts").to_string(), vec![14..18]),
5238 (path!("dir/two.rs").to_string(), vec![8..12]),
5239 ]),
5240 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
5241 );
5242}
5243
5244#[gpui::test]
5245async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5246 init_test(cx);
5247
5248 let search_query = "file";
5249
5250 let fs = FakeFs::new(cx.executor());
5251 fs.insert_tree(
5252 path!("/dir"),
5253 json!({
5254 "one.rs": r#"// Rust file one"#,
5255 "one.ts": r#"// TypeScript file one"#,
5256 "two.rs": r#"// Rust file two"#,
5257 "two.ts": r#"// TypeScript file two"#,
5258 }),
5259 )
5260 .await;
5261 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5262
5263 assert_eq!(
5264 search(
5265 &project,
5266 SearchQuery::text(
5267 search_query,
5268 false,
5269 true,
5270 false,
5271 Default::default(),
5272 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5273 false,
5274 None,
5275 )
5276 .unwrap(),
5277 cx
5278 )
5279 .await
5280 .unwrap(),
5281 HashMap::from_iter([
5282 (path!("dir/one.rs").to_string(), vec![8..12]),
5283 (path!("dir/one.ts").to_string(), vec![14..18]),
5284 (path!("dir/two.rs").to_string(), vec![8..12]),
5285 (path!("dir/two.ts").to_string(), vec![14..18]),
5286 ]),
5287 "If no exclusions match, all files should be returned"
5288 );
5289
5290 assert_eq!(
5291 search(
5292 &project,
5293 SearchQuery::text(
5294 search_query,
5295 false,
5296 true,
5297 false,
5298 Default::default(),
5299 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5300 false,
5301 None,
5302 )
5303 .unwrap(),
5304 cx
5305 )
5306 .await
5307 .unwrap(),
5308 HashMap::from_iter([
5309 (path!("dir/one.ts").to_string(), vec![14..18]),
5310 (path!("dir/two.ts").to_string(), vec![14..18]),
5311 ]),
5312 "Rust exclusion search should give only TypeScript files"
5313 );
5314
5315 assert_eq!(
5316 search(
5317 &project,
5318 SearchQuery::text(
5319 search_query,
5320 false,
5321 true,
5322 false,
5323 Default::default(),
5324 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5325 false,
5326 None,
5327 )
5328 .unwrap(),
5329 cx
5330 )
5331 .await
5332 .unwrap(),
5333 HashMap::from_iter([
5334 (path!("dir/one.rs").to_string(), vec![8..12]),
5335 (path!("dir/two.rs").to_string(), vec![8..12]),
5336 ]),
5337 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5338 );
5339
5340 assert!(
5341 search(
5342 &project,
5343 SearchQuery::text(
5344 search_query,
5345 false,
5346 true,
5347 false,
5348 Default::default(),
5349 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5350 .unwrap(),
5351 false,
5352 None,
5353 )
5354 .unwrap(),
5355 cx
5356 )
5357 .await
5358 .unwrap()
5359 .is_empty(),
5360 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5361 );
5362}
5363
5364#[gpui::test]
5365async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5366 init_test(cx);
5367
5368 let search_query = "file";
5369
5370 let fs = FakeFs::new(cx.executor());
5371 fs.insert_tree(
5372 path!("/dir"),
5373 json!({
5374 "one.rs": r#"// Rust file one"#,
5375 "one.ts": r#"// TypeScript file one"#,
5376 "two.rs": r#"// Rust file two"#,
5377 "two.ts": r#"// TypeScript file two"#,
5378 }),
5379 )
5380 .await;
5381
5382 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5383 let _buffer = project.update(cx, |project, cx| {
5384 let buffer = project.create_local_buffer("file", None, cx);
5385 project.mark_buffer_as_non_searchable(buffer.read(cx).remote_id(), cx);
5386 buffer
5387 });
5388
5389 assert_eq!(
5390 search(
5391 &project,
5392 SearchQuery::text(
5393 search_query,
5394 false,
5395 true,
5396 false,
5397 Default::default(),
5398 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5399 false,
5400 None,
5401 )
5402 .unwrap(),
5403 cx
5404 )
5405 .await
5406 .unwrap(),
5407 HashMap::from_iter([
5408 (path!("dir/one.rs").to_string(), vec![8..12]),
5409 (path!("dir/one.ts").to_string(), vec![14..18]),
5410 (path!("dir/two.rs").to_string(), vec![8..12]),
5411 (path!("dir/two.ts").to_string(), vec![14..18]),
5412 ]),
5413 "If no exclusions match, all files should be returned"
5414 );
5415
5416 assert_eq!(
5417 search(
5418 &project,
5419 SearchQuery::text(
5420 search_query,
5421 false,
5422 true,
5423 false,
5424 Default::default(),
5425 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5426 false,
5427 None,
5428 )
5429 .unwrap(),
5430 cx
5431 )
5432 .await
5433 .unwrap(),
5434 HashMap::from_iter([
5435 (path!("dir/one.ts").to_string(), vec![14..18]),
5436 (path!("dir/two.ts").to_string(), vec![14..18]),
5437 ]),
5438 "Rust exclusion search should give only TypeScript files"
5439 );
5440
5441 assert_eq!(
5442 search(
5443 &project,
5444 SearchQuery::text(
5445 search_query,
5446 false,
5447 true,
5448 false,
5449 Default::default(),
5450 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5451 false,
5452 None,
5453 )
5454 .unwrap(),
5455 cx
5456 )
5457 .await
5458 .unwrap(),
5459 HashMap::from_iter([
5460 (path!("dir/one.rs").to_string(), vec![8..12]),
5461 (path!("dir/two.rs").to_string(), vec![8..12]),
5462 ]),
5463 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5464 );
5465
5466 assert!(
5467 search(
5468 &project,
5469 SearchQuery::text(
5470 search_query,
5471 false,
5472 true,
5473 false,
5474 Default::default(),
5475 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5476 .unwrap(),
5477 false,
5478 None,
5479 )
5480 .unwrap(),
5481 cx
5482 )
5483 .await
5484 .unwrap()
5485 .is_empty(),
5486 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5487 );
5488}
5489
5490#[gpui::test]
5491async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5492 init_test(cx);
5493
5494 let search_query = "file";
5495
5496 let fs = FakeFs::new(cx.executor());
5497 fs.insert_tree(
5498 path!("/dir"),
5499 json!({
5500 "one.rs": r#"// Rust file one"#,
5501 "one.ts": r#"// TypeScript file one"#,
5502 "two.rs": r#"// Rust file two"#,
5503 "two.ts": r#"// TypeScript file two"#,
5504 }),
5505 )
5506 .await;
5507 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5508
5509 assert!(
5510 search(
5511 &project,
5512 SearchQuery::text(
5513 search_query,
5514 false,
5515 true,
5516 false,
5517 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5518 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5519 false,
5520 None,
5521 )
5522 .unwrap(),
5523 cx
5524 )
5525 .await
5526 .unwrap()
5527 .is_empty(),
5528 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5529 );
5530
5531 assert!(
5532 search(
5533 &project,
5534 SearchQuery::text(
5535 search_query,
5536 false,
5537 true,
5538 false,
5539 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5540 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5541 false,
5542 None,
5543 )
5544 .unwrap(),
5545 cx
5546 )
5547 .await
5548 .unwrap()
5549 .is_empty(),
5550 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5551 );
5552
5553 assert!(
5554 search(
5555 &project,
5556 SearchQuery::text(
5557 search_query,
5558 false,
5559 true,
5560 false,
5561 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5562 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5563 false,
5564 None,
5565 )
5566 .unwrap(),
5567 cx
5568 )
5569 .await
5570 .unwrap()
5571 .is_empty(),
5572 "Non-matching inclusions and exclusions should not change that."
5573 );
5574
5575 assert_eq!(
5576 search(
5577 &project,
5578 SearchQuery::text(
5579 search_query,
5580 false,
5581 true,
5582 false,
5583 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5584 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5585 false,
5586 None,
5587 )
5588 .unwrap(),
5589 cx
5590 )
5591 .await
5592 .unwrap(),
5593 HashMap::from_iter([
5594 (path!("dir/one.ts").to_string(), vec![14..18]),
5595 (path!("dir/two.ts").to_string(), vec![14..18]),
5596 ]),
5597 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5598 );
5599}
5600
5601#[gpui::test]
5602async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
5603 init_test(cx);
5604
5605 let fs = FakeFs::new(cx.executor());
5606 fs.insert_tree(
5607 path!("/worktree-a"),
5608 json!({
5609 "haystack.rs": r#"// NEEDLE"#,
5610 "haystack.ts": r#"// NEEDLE"#,
5611 }),
5612 )
5613 .await;
5614 fs.insert_tree(
5615 path!("/worktree-b"),
5616 json!({
5617 "haystack.rs": r#"// NEEDLE"#,
5618 "haystack.ts": r#"// NEEDLE"#,
5619 }),
5620 )
5621 .await;
5622
5623 let project = Project::test(
5624 fs.clone(),
5625 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
5626 cx,
5627 )
5628 .await;
5629
5630 assert_eq!(
5631 search(
5632 &project,
5633 SearchQuery::text(
5634 "NEEDLE",
5635 false,
5636 true,
5637 false,
5638 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
5639 Default::default(),
5640 true,
5641 None,
5642 )
5643 .unwrap(),
5644 cx
5645 )
5646 .await
5647 .unwrap(),
5648 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
5649 "should only return results from included worktree"
5650 );
5651 assert_eq!(
5652 search(
5653 &project,
5654 SearchQuery::text(
5655 "NEEDLE",
5656 false,
5657 true,
5658 false,
5659 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
5660 Default::default(),
5661 true,
5662 None,
5663 )
5664 .unwrap(),
5665 cx
5666 )
5667 .await
5668 .unwrap(),
5669 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
5670 "should only return results from included worktree"
5671 );
5672
5673 assert_eq!(
5674 search(
5675 &project,
5676 SearchQuery::text(
5677 "NEEDLE",
5678 false,
5679 true,
5680 false,
5681 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5682 Default::default(),
5683 false,
5684 None,
5685 )
5686 .unwrap(),
5687 cx
5688 )
5689 .await
5690 .unwrap(),
5691 HashMap::from_iter([
5692 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
5693 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
5694 ]),
5695 "should return results from both worktrees"
5696 );
5697}
5698
5699#[gpui::test]
5700async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
5701 init_test(cx);
5702
5703 let fs = FakeFs::new(cx.background_executor.clone());
5704 fs.insert_tree(
5705 path!("/dir"),
5706 json!({
5707 ".git": {},
5708 ".gitignore": "**/target\n/node_modules\n",
5709 "target": {
5710 "index.txt": "index_key:index_value"
5711 },
5712 "node_modules": {
5713 "eslint": {
5714 "index.ts": "const eslint_key = 'eslint value'",
5715 "package.json": r#"{ "some_key": "some value" }"#,
5716 },
5717 "prettier": {
5718 "index.ts": "const prettier_key = 'prettier value'",
5719 "package.json": r#"{ "other_key": "other value" }"#,
5720 },
5721 },
5722 "package.json": r#"{ "main_key": "main value" }"#,
5723 }),
5724 )
5725 .await;
5726 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5727
5728 let query = "key";
5729 assert_eq!(
5730 search(
5731 &project,
5732 SearchQuery::text(
5733 query,
5734 false,
5735 false,
5736 false,
5737 Default::default(),
5738 Default::default(),
5739 false,
5740 None,
5741 )
5742 .unwrap(),
5743 cx
5744 )
5745 .await
5746 .unwrap(),
5747 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
5748 "Only one non-ignored file should have the query"
5749 );
5750
5751 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5752 assert_eq!(
5753 search(
5754 &project,
5755 SearchQuery::text(
5756 query,
5757 false,
5758 false,
5759 true,
5760 Default::default(),
5761 Default::default(),
5762 false,
5763 None,
5764 )
5765 .unwrap(),
5766 cx
5767 )
5768 .await
5769 .unwrap(),
5770 HashMap::from_iter([
5771 (path!("dir/package.json").to_string(), vec![8..11]),
5772 (path!("dir/target/index.txt").to_string(), vec![6..9]),
5773 (
5774 path!("dir/node_modules/prettier/package.json").to_string(),
5775 vec![9..12]
5776 ),
5777 (
5778 path!("dir/node_modules/prettier/index.ts").to_string(),
5779 vec![15..18]
5780 ),
5781 (
5782 path!("dir/node_modules/eslint/index.ts").to_string(),
5783 vec![13..16]
5784 ),
5785 (
5786 path!("dir/node_modules/eslint/package.json").to_string(),
5787 vec![8..11]
5788 ),
5789 ]),
5790 "Unrestricted search with ignored directories should find every file with the query"
5791 );
5792
5793 let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
5794 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
5795 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5796 assert_eq!(
5797 search(
5798 &project,
5799 SearchQuery::text(
5800 query,
5801 false,
5802 false,
5803 true,
5804 files_to_include,
5805 files_to_exclude,
5806 false,
5807 None,
5808 )
5809 .unwrap(),
5810 cx
5811 )
5812 .await
5813 .unwrap(),
5814 HashMap::from_iter([(
5815 path!("dir/node_modules/prettier/package.json").to_string(),
5816 vec![9..12]
5817 )]),
5818 "With search including ignored prettier directory and excluding TS files, only one file should be found"
5819 );
5820}
5821
5822#[gpui::test]
5823async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
5824 init_test(cx);
5825
5826 let fs = FakeFs::new(cx.executor());
5827 fs.insert_tree(
5828 path!("/dir"),
5829 json!({
5830 "one.rs": "// ПРИВЕТ? привет!",
5831 "two.rs": "// ПРИВЕТ.",
5832 "three.rs": "// привет",
5833 }),
5834 )
5835 .await;
5836 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5837
5838 let unicode_case_sensitive_query = SearchQuery::text(
5839 "привет",
5840 false,
5841 true,
5842 false,
5843 Default::default(),
5844 Default::default(),
5845 false,
5846 None,
5847 );
5848 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
5849 assert_eq!(
5850 search(&project, unicode_case_sensitive_query.unwrap(), cx)
5851 .await
5852 .unwrap(),
5853 HashMap::from_iter([
5854 (path!("dir/one.rs").to_string(), vec![17..29]),
5855 (path!("dir/three.rs").to_string(), vec![3..15]),
5856 ])
5857 );
5858
5859 let unicode_case_insensitive_query = SearchQuery::text(
5860 "привет",
5861 false,
5862 false,
5863 false,
5864 Default::default(),
5865 Default::default(),
5866 false,
5867 None,
5868 );
5869 assert_matches!(
5870 unicode_case_insensitive_query,
5871 Ok(SearchQuery::Regex { .. })
5872 );
5873 assert_eq!(
5874 search(&project, unicode_case_insensitive_query.unwrap(), cx)
5875 .await
5876 .unwrap(),
5877 HashMap::from_iter([
5878 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
5879 (path!("dir/two.rs").to_string(), vec![3..15]),
5880 (path!("dir/three.rs").to_string(), vec![3..15]),
5881 ])
5882 );
5883
5884 assert_eq!(
5885 search(
5886 &project,
5887 SearchQuery::text(
5888 "привет.",
5889 false,
5890 false,
5891 false,
5892 Default::default(),
5893 Default::default(),
5894 false,
5895 None,
5896 )
5897 .unwrap(),
5898 cx
5899 )
5900 .await
5901 .unwrap(),
5902 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
5903 );
5904}
5905
5906#[gpui::test]
5907async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5908 init_test(cx);
5909
5910 let fs = FakeFs::new(cx.executor());
5911 fs.insert_tree(
5912 "/one/two",
5913 json!({
5914 "three": {
5915 "a.txt": "",
5916 "four": {}
5917 },
5918 "c.rs": ""
5919 }),
5920 )
5921 .await;
5922
5923 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5924 project
5925 .update(cx, |project, cx| {
5926 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5927 project.create_entry((id, "b.."), true, cx)
5928 })
5929 .await
5930 .unwrap()
5931 .into_included()
5932 .unwrap();
5933
5934 // Can't create paths outside the project
5935 let result = project
5936 .update(cx, |project, cx| {
5937 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5938 project.create_entry((id, "../../boop"), true, cx)
5939 })
5940 .await;
5941 assert!(result.is_err());
5942
5943 // Can't create paths with '..'
5944 let result = project
5945 .update(cx, |project, cx| {
5946 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5947 project.create_entry((id, "four/../beep"), true, cx)
5948 })
5949 .await;
5950 assert!(result.is_err());
5951
5952 assert_eq!(
5953 fs.paths(true),
5954 vec![
5955 PathBuf::from(path!("/")),
5956 PathBuf::from(path!("/one")),
5957 PathBuf::from(path!("/one/two")),
5958 PathBuf::from(path!("/one/two/c.rs")),
5959 PathBuf::from(path!("/one/two/three")),
5960 PathBuf::from(path!("/one/two/three/a.txt")),
5961 PathBuf::from(path!("/one/two/three/b..")),
5962 PathBuf::from(path!("/one/two/three/four")),
5963 ]
5964 );
5965
5966 // And we cannot open buffers with '..'
5967 let result = project
5968 .update(cx, |project, cx| {
5969 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5970 project.open_buffer((id, "../c.rs"), cx)
5971 })
5972 .await;
5973 assert!(result.is_err())
5974}
5975
5976#[gpui::test]
5977async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
5978 init_test(cx);
5979
5980 let fs = FakeFs::new(cx.executor());
5981 fs.insert_tree(
5982 path!("/dir"),
5983 json!({
5984 "a.tsx": "a",
5985 }),
5986 )
5987 .await;
5988
5989 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5990
5991 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5992 language_registry.add(tsx_lang());
5993 let language_server_names = [
5994 "TypeScriptServer",
5995 "TailwindServer",
5996 "ESLintServer",
5997 "NoHoverCapabilitiesServer",
5998 ];
5999 let mut language_servers = [
6000 language_registry.register_fake_lsp(
6001 "tsx",
6002 FakeLspAdapter {
6003 name: language_server_names[0],
6004 capabilities: lsp::ServerCapabilities {
6005 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6006 ..lsp::ServerCapabilities::default()
6007 },
6008 ..FakeLspAdapter::default()
6009 },
6010 ),
6011 language_registry.register_fake_lsp(
6012 "tsx",
6013 FakeLspAdapter {
6014 name: language_server_names[1],
6015 capabilities: lsp::ServerCapabilities {
6016 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6017 ..lsp::ServerCapabilities::default()
6018 },
6019 ..FakeLspAdapter::default()
6020 },
6021 ),
6022 language_registry.register_fake_lsp(
6023 "tsx",
6024 FakeLspAdapter {
6025 name: language_server_names[2],
6026 capabilities: lsp::ServerCapabilities {
6027 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6028 ..lsp::ServerCapabilities::default()
6029 },
6030 ..FakeLspAdapter::default()
6031 },
6032 ),
6033 language_registry.register_fake_lsp(
6034 "tsx",
6035 FakeLspAdapter {
6036 name: language_server_names[3],
6037 capabilities: lsp::ServerCapabilities {
6038 hover_provider: None,
6039 ..lsp::ServerCapabilities::default()
6040 },
6041 ..FakeLspAdapter::default()
6042 },
6043 ),
6044 ];
6045
6046 let (buffer, _handle) = project
6047 .update(cx, |p, cx| {
6048 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6049 })
6050 .await
6051 .unwrap();
6052 cx.executor().run_until_parked();
6053
6054 let mut servers_with_hover_requests = HashMap::default();
6055 for i in 0..language_server_names.len() {
6056 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
6057 panic!(
6058 "Failed to get language server #{i} with name {}",
6059 &language_server_names[i]
6060 )
6061 });
6062 let new_server_name = new_server.server.name();
6063 assert!(
6064 !servers_with_hover_requests.contains_key(&new_server_name),
6065 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6066 );
6067 match new_server_name.as_ref() {
6068 "TailwindServer" | "TypeScriptServer" => {
6069 servers_with_hover_requests.insert(
6070 new_server_name.clone(),
6071 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6072 move |_, _| {
6073 let name = new_server_name.clone();
6074 async move {
6075 Ok(Some(lsp::Hover {
6076 contents: lsp::HoverContents::Scalar(
6077 lsp::MarkedString::String(format!("{name} hover")),
6078 ),
6079 range: None,
6080 }))
6081 }
6082 },
6083 ),
6084 );
6085 }
6086 "ESLintServer" => {
6087 servers_with_hover_requests.insert(
6088 new_server_name,
6089 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6090 |_, _| async move { Ok(None) },
6091 ),
6092 );
6093 }
6094 "NoHoverCapabilitiesServer" => {
6095 let _never_handled = new_server
6096 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
6097 panic!(
6098 "Should not call for hovers server with no corresponding capabilities"
6099 )
6100 });
6101 }
6102 unexpected => panic!("Unexpected server name: {unexpected}"),
6103 }
6104 }
6105
6106 let hover_task = project.update(cx, |project, cx| {
6107 project.hover(&buffer, Point::new(0, 0), cx)
6108 });
6109 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
6110 |mut hover_request| async move {
6111 hover_request
6112 .next()
6113 .await
6114 .expect("All hover requests should have been triggered")
6115 },
6116 ))
6117 .await;
6118 assert_eq!(
6119 vec!["TailwindServer hover", "TypeScriptServer hover"],
6120 hover_task
6121 .await
6122 .into_iter()
6123 .flatten()
6124 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6125 .sorted()
6126 .collect::<Vec<_>>(),
6127 "Should receive hover responses from all related servers with hover capabilities"
6128 );
6129}
6130
6131#[gpui::test]
6132async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
6133 init_test(cx);
6134
6135 let fs = FakeFs::new(cx.executor());
6136 fs.insert_tree(
6137 path!("/dir"),
6138 json!({
6139 "a.ts": "a",
6140 }),
6141 )
6142 .await;
6143
6144 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6145
6146 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6147 language_registry.add(typescript_lang());
6148 let mut fake_language_servers = language_registry.register_fake_lsp(
6149 "TypeScript",
6150 FakeLspAdapter {
6151 capabilities: lsp::ServerCapabilities {
6152 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6153 ..lsp::ServerCapabilities::default()
6154 },
6155 ..FakeLspAdapter::default()
6156 },
6157 );
6158
6159 let (buffer, _handle) = project
6160 .update(cx, |p, cx| {
6161 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6162 })
6163 .await
6164 .unwrap();
6165 cx.executor().run_until_parked();
6166
6167 let fake_server = fake_language_servers
6168 .next()
6169 .await
6170 .expect("failed to get the language server");
6171
6172 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6173 move |_, _| async move {
6174 Ok(Some(lsp::Hover {
6175 contents: lsp::HoverContents::Array(vec![
6176 lsp::MarkedString::String("".to_string()),
6177 lsp::MarkedString::String(" ".to_string()),
6178 lsp::MarkedString::String("\n\n\n".to_string()),
6179 ]),
6180 range: None,
6181 }))
6182 },
6183 );
6184
6185 let hover_task = project.update(cx, |project, cx| {
6186 project.hover(&buffer, Point::new(0, 0), cx)
6187 });
6188 let () = request_handled
6189 .next()
6190 .await
6191 .expect("All hover requests should have been triggered");
6192 assert_eq!(
6193 Vec::<String>::new(),
6194 hover_task
6195 .await
6196 .into_iter()
6197 .flatten()
6198 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6199 .sorted()
6200 .collect::<Vec<_>>(),
6201 "Empty hover parts should be ignored"
6202 );
6203}
6204
6205#[gpui::test]
6206async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
6207 init_test(cx);
6208
6209 let fs = FakeFs::new(cx.executor());
6210 fs.insert_tree(
6211 path!("/dir"),
6212 json!({
6213 "a.ts": "a",
6214 }),
6215 )
6216 .await;
6217
6218 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6219
6220 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6221 language_registry.add(typescript_lang());
6222 let mut fake_language_servers = language_registry.register_fake_lsp(
6223 "TypeScript",
6224 FakeLspAdapter {
6225 capabilities: lsp::ServerCapabilities {
6226 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6227 ..lsp::ServerCapabilities::default()
6228 },
6229 ..FakeLspAdapter::default()
6230 },
6231 );
6232
6233 let (buffer, _handle) = project
6234 .update(cx, |p, cx| {
6235 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6236 })
6237 .await
6238 .unwrap();
6239 cx.executor().run_until_parked();
6240
6241 let fake_server = fake_language_servers
6242 .next()
6243 .await
6244 .expect("failed to get the language server");
6245
6246 let mut request_handled = fake_server
6247 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
6248 Ok(Some(vec![
6249 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6250 title: "organize imports".to_string(),
6251 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
6252 ..lsp::CodeAction::default()
6253 }),
6254 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6255 title: "fix code".to_string(),
6256 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
6257 ..lsp::CodeAction::default()
6258 }),
6259 ]))
6260 });
6261
6262 let code_actions_task = project.update(cx, |project, cx| {
6263 project.code_actions(
6264 &buffer,
6265 0..buffer.read(cx).len(),
6266 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
6267 cx,
6268 )
6269 });
6270
6271 let () = request_handled
6272 .next()
6273 .await
6274 .expect("The code action request should have been triggered");
6275
6276 let code_actions = code_actions_task.await.unwrap().unwrap();
6277 assert_eq!(code_actions.len(), 1);
6278 assert_eq!(
6279 code_actions[0].lsp_action.action_kind(),
6280 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
6281 );
6282}
6283
6284#[gpui::test]
6285async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6286 init_test(cx);
6287
6288 let fs = FakeFs::new(cx.executor());
6289 fs.insert_tree(
6290 path!("/dir"),
6291 json!({
6292 "a.tsx": "a",
6293 }),
6294 )
6295 .await;
6296
6297 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6298
6299 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6300 language_registry.add(tsx_lang());
6301 let language_server_names = [
6302 "TypeScriptServer",
6303 "TailwindServer",
6304 "ESLintServer",
6305 "NoActionsCapabilitiesServer",
6306 ];
6307
6308 let mut language_server_rxs = [
6309 language_registry.register_fake_lsp(
6310 "tsx",
6311 FakeLspAdapter {
6312 name: language_server_names[0],
6313 capabilities: lsp::ServerCapabilities {
6314 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6315 ..lsp::ServerCapabilities::default()
6316 },
6317 ..FakeLspAdapter::default()
6318 },
6319 ),
6320 language_registry.register_fake_lsp(
6321 "tsx",
6322 FakeLspAdapter {
6323 name: language_server_names[1],
6324 capabilities: lsp::ServerCapabilities {
6325 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6326 ..lsp::ServerCapabilities::default()
6327 },
6328 ..FakeLspAdapter::default()
6329 },
6330 ),
6331 language_registry.register_fake_lsp(
6332 "tsx",
6333 FakeLspAdapter {
6334 name: language_server_names[2],
6335 capabilities: lsp::ServerCapabilities {
6336 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6337 ..lsp::ServerCapabilities::default()
6338 },
6339 ..FakeLspAdapter::default()
6340 },
6341 ),
6342 language_registry.register_fake_lsp(
6343 "tsx",
6344 FakeLspAdapter {
6345 name: language_server_names[3],
6346 capabilities: lsp::ServerCapabilities {
6347 code_action_provider: None,
6348 ..lsp::ServerCapabilities::default()
6349 },
6350 ..FakeLspAdapter::default()
6351 },
6352 ),
6353 ];
6354
6355 let (buffer, _handle) = project
6356 .update(cx, |p, cx| {
6357 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6358 })
6359 .await
6360 .unwrap();
6361 cx.executor().run_until_parked();
6362
6363 let mut servers_with_actions_requests = HashMap::default();
6364 for i in 0..language_server_names.len() {
6365 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6366 panic!(
6367 "Failed to get language server #{i} with name {}",
6368 &language_server_names[i]
6369 )
6370 });
6371 let new_server_name = new_server.server.name();
6372
6373 assert!(
6374 !servers_with_actions_requests.contains_key(&new_server_name),
6375 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6376 );
6377 match new_server_name.0.as_ref() {
6378 "TailwindServer" | "TypeScriptServer" => {
6379 servers_with_actions_requests.insert(
6380 new_server_name.clone(),
6381 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6382 move |_, _| {
6383 let name = new_server_name.clone();
6384 async move {
6385 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6386 lsp::CodeAction {
6387 title: format!("{name} code action"),
6388 ..lsp::CodeAction::default()
6389 },
6390 )]))
6391 }
6392 },
6393 ),
6394 );
6395 }
6396 "ESLintServer" => {
6397 servers_with_actions_requests.insert(
6398 new_server_name,
6399 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6400 |_, _| async move { Ok(None) },
6401 ),
6402 );
6403 }
6404 "NoActionsCapabilitiesServer" => {
6405 let _never_handled = new_server
6406 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6407 panic!(
6408 "Should not call for code actions server with no corresponding capabilities"
6409 )
6410 });
6411 }
6412 unexpected => panic!("Unexpected server name: {unexpected}"),
6413 }
6414 }
6415
6416 let code_actions_task = project.update(cx, |project, cx| {
6417 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6418 });
6419
6420 // cx.run_until_parked();
6421 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6422 |mut code_actions_request| async move {
6423 code_actions_request
6424 .next()
6425 .await
6426 .expect("All code actions requests should have been triggered")
6427 },
6428 ))
6429 .await;
6430 assert_eq!(
6431 vec!["TailwindServer code action", "TypeScriptServer code action"],
6432 code_actions_task
6433 .await
6434 .unwrap()
6435 .unwrap()
6436 .into_iter()
6437 .map(|code_action| code_action.lsp_action.title().to_owned())
6438 .sorted()
6439 .collect::<Vec<_>>(),
6440 "Should receive code actions responses from all related servers with hover capabilities"
6441 );
6442}
6443
6444#[gpui::test]
6445async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6446 init_test(cx);
6447
6448 let fs = FakeFs::new(cx.executor());
6449 fs.insert_tree(
6450 "/dir",
6451 json!({
6452 "a.rs": "let a = 1;",
6453 "b.rs": "let b = 2;",
6454 "c.rs": "let c = 2;",
6455 }),
6456 )
6457 .await;
6458
6459 let project = Project::test(
6460 fs,
6461 [
6462 "/dir/a.rs".as_ref(),
6463 "/dir/b.rs".as_ref(),
6464 "/dir/c.rs".as_ref(),
6465 ],
6466 cx,
6467 )
6468 .await;
6469
6470 // check the initial state and get the worktrees
6471 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6472 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6473 assert_eq!(worktrees.len(), 3);
6474
6475 let worktree_a = worktrees[0].read(cx);
6476 let worktree_b = worktrees[1].read(cx);
6477 let worktree_c = worktrees[2].read(cx);
6478
6479 // check they start in the right order
6480 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6481 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6482 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6483
6484 (
6485 worktrees[0].clone(),
6486 worktrees[1].clone(),
6487 worktrees[2].clone(),
6488 )
6489 });
6490
6491 // move first worktree to after the second
6492 // [a, b, c] -> [b, a, c]
6493 project
6494 .update(cx, |project, cx| {
6495 let first = worktree_a.read(cx);
6496 let second = worktree_b.read(cx);
6497 project.move_worktree(first.id(), second.id(), cx)
6498 })
6499 .expect("moving first after second");
6500
6501 // check the state after moving
6502 project.update(cx, |project, cx| {
6503 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6504 assert_eq!(worktrees.len(), 3);
6505
6506 let first = worktrees[0].read(cx);
6507 let second = worktrees[1].read(cx);
6508 let third = worktrees[2].read(cx);
6509
6510 // check they are now in the right order
6511 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6512 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6513 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6514 });
6515
6516 // move the second worktree to before the first
6517 // [b, a, c] -> [a, b, c]
6518 project
6519 .update(cx, |project, cx| {
6520 let second = worktree_a.read(cx);
6521 let first = worktree_b.read(cx);
6522 project.move_worktree(first.id(), second.id(), cx)
6523 })
6524 .expect("moving second before first");
6525
6526 // check the state after moving
6527 project.update(cx, |project, cx| {
6528 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6529 assert_eq!(worktrees.len(), 3);
6530
6531 let first = worktrees[0].read(cx);
6532 let second = worktrees[1].read(cx);
6533 let third = worktrees[2].read(cx);
6534
6535 // check they are now in the right order
6536 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6537 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6538 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6539 });
6540
6541 // move the second worktree to after the third
6542 // [a, b, c] -> [a, c, b]
6543 project
6544 .update(cx, |project, cx| {
6545 let second = worktree_b.read(cx);
6546 let third = worktree_c.read(cx);
6547 project.move_worktree(second.id(), third.id(), cx)
6548 })
6549 .expect("moving second after third");
6550
6551 // check the state after moving
6552 project.update(cx, |project, cx| {
6553 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6554 assert_eq!(worktrees.len(), 3);
6555
6556 let first = worktrees[0].read(cx);
6557 let second = worktrees[1].read(cx);
6558 let third = worktrees[2].read(cx);
6559
6560 // check they are now in the right order
6561 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6562 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6563 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6564 });
6565
6566 // move the third worktree to before the second
6567 // [a, c, b] -> [a, b, c]
6568 project
6569 .update(cx, |project, cx| {
6570 let third = worktree_c.read(cx);
6571 let second = worktree_b.read(cx);
6572 project.move_worktree(third.id(), second.id(), cx)
6573 })
6574 .expect("moving third before second");
6575
6576 // check the state after moving
6577 project.update(cx, |project, cx| {
6578 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6579 assert_eq!(worktrees.len(), 3);
6580
6581 let first = worktrees[0].read(cx);
6582 let second = worktrees[1].read(cx);
6583 let third = worktrees[2].read(cx);
6584
6585 // check they are now in the right order
6586 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6587 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6588 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6589 });
6590
6591 // move the first worktree to after the third
6592 // [a, b, c] -> [b, c, a]
6593 project
6594 .update(cx, |project, cx| {
6595 let first = worktree_a.read(cx);
6596 let third = worktree_c.read(cx);
6597 project.move_worktree(first.id(), third.id(), cx)
6598 })
6599 .expect("moving first after third");
6600
6601 // check the state after moving
6602 project.update(cx, |project, cx| {
6603 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6604 assert_eq!(worktrees.len(), 3);
6605
6606 let first = worktrees[0].read(cx);
6607 let second = worktrees[1].read(cx);
6608 let third = worktrees[2].read(cx);
6609
6610 // check they are now in the right order
6611 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6612 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6613 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6614 });
6615
6616 // move the third worktree to before the first
6617 // [b, c, a] -> [a, b, c]
6618 project
6619 .update(cx, |project, cx| {
6620 let third = worktree_a.read(cx);
6621 let first = worktree_b.read(cx);
6622 project.move_worktree(third.id(), first.id(), cx)
6623 })
6624 .expect("moving third before first");
6625
6626 // check the state after moving
6627 project.update(cx, |project, cx| {
6628 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6629 assert_eq!(worktrees.len(), 3);
6630
6631 let first = worktrees[0].read(cx);
6632 let second = worktrees[1].read(cx);
6633 let third = worktrees[2].read(cx);
6634
6635 // check they are now in the right order
6636 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6637 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6638 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6639 });
6640}
6641
6642#[gpui::test]
6643async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
6644 init_test(cx);
6645
6646 let staged_contents = r#"
6647 fn main() {
6648 println!("hello world");
6649 }
6650 "#
6651 .unindent();
6652 let file_contents = r#"
6653 // print goodbye
6654 fn main() {
6655 println!("goodbye world");
6656 }
6657 "#
6658 .unindent();
6659
6660 let fs = FakeFs::new(cx.background_executor.clone());
6661 fs.insert_tree(
6662 "/dir",
6663 json!({
6664 ".git": {},
6665 "src": {
6666 "main.rs": file_contents,
6667 }
6668 }),
6669 )
6670 .await;
6671
6672 fs.set_index_for_repo(
6673 Path::new("/dir/.git"),
6674 &[("src/main.rs".into(), staged_contents)],
6675 );
6676
6677 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6678
6679 let buffer = project
6680 .update(cx, |project, cx| {
6681 project.open_local_buffer("/dir/src/main.rs", cx)
6682 })
6683 .await
6684 .unwrap();
6685 let unstaged_diff = project
6686 .update(cx, |project, cx| {
6687 project.open_unstaged_diff(buffer.clone(), cx)
6688 })
6689 .await
6690 .unwrap();
6691
6692 cx.run_until_parked();
6693 unstaged_diff.update(cx, |unstaged_diff, cx| {
6694 let snapshot = buffer.read(cx).snapshot();
6695 assert_hunks(
6696 unstaged_diff.hunks(&snapshot, cx),
6697 &snapshot,
6698 &unstaged_diff.base_text_string().unwrap(),
6699 &[
6700 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
6701 (
6702 2..3,
6703 " println!(\"hello world\");\n",
6704 " println!(\"goodbye world\");\n",
6705 DiffHunkStatus::modified_none(),
6706 ),
6707 ],
6708 );
6709 });
6710
6711 let staged_contents = r#"
6712 // print goodbye
6713 fn main() {
6714 }
6715 "#
6716 .unindent();
6717
6718 fs.set_index_for_repo(
6719 Path::new("/dir/.git"),
6720 &[("src/main.rs".into(), staged_contents)],
6721 );
6722
6723 cx.run_until_parked();
6724 unstaged_diff.update(cx, |unstaged_diff, cx| {
6725 let snapshot = buffer.read(cx).snapshot();
6726 assert_hunks(
6727 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
6728 &snapshot,
6729 &unstaged_diff.base_text().text(),
6730 &[(
6731 2..3,
6732 "",
6733 " println!(\"goodbye world\");\n",
6734 DiffHunkStatus::added_none(),
6735 )],
6736 );
6737 });
6738}
6739
6740#[gpui::test]
6741async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
6742 init_test(cx);
6743
6744 let committed_contents = r#"
6745 fn main() {
6746 println!("hello world");
6747 }
6748 "#
6749 .unindent();
6750 let staged_contents = r#"
6751 fn main() {
6752 println!("goodbye world");
6753 }
6754 "#
6755 .unindent();
6756 let file_contents = r#"
6757 // print goodbye
6758 fn main() {
6759 println!("goodbye world");
6760 }
6761 "#
6762 .unindent();
6763
6764 let fs = FakeFs::new(cx.background_executor.clone());
6765 fs.insert_tree(
6766 "/dir",
6767 json!({
6768 ".git": {},
6769 "src": {
6770 "modification.rs": file_contents,
6771 }
6772 }),
6773 )
6774 .await;
6775
6776 fs.set_head_for_repo(
6777 Path::new("/dir/.git"),
6778 &[
6779 ("src/modification.rs".into(), committed_contents),
6780 ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
6781 ],
6782 "deadbeef",
6783 );
6784 fs.set_index_for_repo(
6785 Path::new("/dir/.git"),
6786 &[
6787 ("src/modification.rs".into(), staged_contents),
6788 ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
6789 ],
6790 );
6791
6792 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6793 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6794 let language = rust_lang();
6795 language_registry.add(language.clone());
6796
6797 let buffer_1 = project
6798 .update(cx, |project, cx| {
6799 project.open_local_buffer("/dir/src/modification.rs", cx)
6800 })
6801 .await
6802 .unwrap();
6803 let diff_1 = project
6804 .update(cx, |project, cx| {
6805 project.open_uncommitted_diff(buffer_1.clone(), cx)
6806 })
6807 .await
6808 .unwrap();
6809 diff_1.read_with(cx, |diff, _| {
6810 assert_eq!(diff.base_text().language().cloned(), Some(language))
6811 });
6812 cx.run_until_parked();
6813 diff_1.update(cx, |diff, cx| {
6814 let snapshot = buffer_1.read(cx).snapshot();
6815 assert_hunks(
6816 diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
6817 &snapshot,
6818 &diff.base_text_string().unwrap(),
6819 &[
6820 (
6821 0..1,
6822 "",
6823 "// print goodbye\n",
6824 DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
6825 ),
6826 (
6827 2..3,
6828 " println!(\"hello world\");\n",
6829 " println!(\"goodbye world\");\n",
6830 DiffHunkStatus::modified_none(),
6831 ),
6832 ],
6833 );
6834 });
6835
6836 // Reset HEAD to a version that differs from both the buffer and the index.
6837 let committed_contents = r#"
6838 // print goodbye
6839 fn main() {
6840 }
6841 "#
6842 .unindent();
6843 fs.set_head_for_repo(
6844 Path::new("/dir/.git"),
6845 &[
6846 ("src/modification.rs".into(), committed_contents.clone()),
6847 ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
6848 ],
6849 "deadbeef",
6850 );
6851
6852 // Buffer now has an unstaged hunk.
6853 cx.run_until_parked();
6854 diff_1.update(cx, |diff, cx| {
6855 let snapshot = buffer_1.read(cx).snapshot();
6856 assert_hunks(
6857 diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
6858 &snapshot,
6859 &diff.base_text().text(),
6860 &[(
6861 2..3,
6862 "",
6863 " println!(\"goodbye world\");\n",
6864 DiffHunkStatus::added_none(),
6865 )],
6866 );
6867 });
6868
6869 // Open a buffer for a file that's been deleted.
6870 let buffer_2 = project
6871 .update(cx, |project, cx| {
6872 project.open_local_buffer("/dir/src/deletion.rs", cx)
6873 })
6874 .await
6875 .unwrap();
6876 let diff_2 = project
6877 .update(cx, |project, cx| {
6878 project.open_uncommitted_diff(buffer_2.clone(), cx)
6879 })
6880 .await
6881 .unwrap();
6882 cx.run_until_parked();
6883 diff_2.update(cx, |diff, cx| {
6884 let snapshot = buffer_2.read(cx).snapshot();
6885 assert_hunks(
6886 diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
6887 &snapshot,
6888 &diff.base_text_string().unwrap(),
6889 &[(
6890 0..0,
6891 "// the-deleted-contents\n",
6892 "",
6893 DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
6894 )],
6895 );
6896 });
6897
6898 // Stage the deletion of this file
6899 fs.set_index_for_repo(
6900 Path::new("/dir/.git"),
6901 &[("src/modification.rs".into(), committed_contents.clone())],
6902 );
6903 cx.run_until_parked();
6904 diff_2.update(cx, |diff, cx| {
6905 let snapshot = buffer_2.read(cx).snapshot();
6906 assert_hunks(
6907 diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
6908 &snapshot,
6909 &diff.base_text_string().unwrap(),
6910 &[(
6911 0..0,
6912 "// the-deleted-contents\n",
6913 "",
6914 DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
6915 )],
6916 );
6917 });
6918}
6919
6920#[gpui::test]
6921async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
6922 use DiffHunkSecondaryStatus::*;
6923 init_test(cx);
6924
6925 let committed_contents = r#"
6926 zero
6927 one
6928 two
6929 three
6930 four
6931 five
6932 "#
6933 .unindent();
6934 let file_contents = r#"
6935 one
6936 TWO
6937 three
6938 FOUR
6939 five
6940 "#
6941 .unindent();
6942
6943 let fs = FakeFs::new(cx.background_executor.clone());
6944 fs.insert_tree(
6945 "/dir",
6946 json!({
6947 ".git": {},
6948 "file.txt": file_contents.clone()
6949 }),
6950 )
6951 .await;
6952
6953 fs.set_head_and_index_for_repo(
6954 "/dir/.git".as_ref(),
6955 &[("file.txt".into(), committed_contents.clone())],
6956 );
6957
6958 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6959
6960 let buffer = project
6961 .update(cx, |project, cx| {
6962 project.open_local_buffer("/dir/file.txt", cx)
6963 })
6964 .await
6965 .unwrap();
6966 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6967 let uncommitted_diff = project
6968 .update(cx, |project, cx| {
6969 project.open_uncommitted_diff(buffer.clone(), cx)
6970 })
6971 .await
6972 .unwrap();
6973 let mut diff_events = cx.events(&uncommitted_diff);
6974
6975 // The hunks are initially unstaged.
6976 uncommitted_diff.read_with(cx, |diff, cx| {
6977 assert_hunks(
6978 diff.hunks(&snapshot, cx),
6979 &snapshot,
6980 &diff.base_text_string().unwrap(),
6981 &[
6982 (
6983 0..0,
6984 "zero\n",
6985 "",
6986 DiffHunkStatus::deleted(HasSecondaryHunk),
6987 ),
6988 (
6989 1..2,
6990 "two\n",
6991 "TWO\n",
6992 DiffHunkStatus::modified(HasSecondaryHunk),
6993 ),
6994 (
6995 3..4,
6996 "four\n",
6997 "FOUR\n",
6998 DiffHunkStatus::modified(HasSecondaryHunk),
6999 ),
7000 ],
7001 );
7002 });
7003
7004 // Stage a hunk. It appears as optimistically staged.
7005 uncommitted_diff.update(cx, |diff, cx| {
7006 let range =
7007 snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
7008 let hunks = diff
7009 .hunks_intersecting_range(range, &snapshot, cx)
7010 .collect::<Vec<_>>();
7011 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
7012
7013 assert_hunks(
7014 diff.hunks(&snapshot, cx),
7015 &snapshot,
7016 &diff.base_text_string().unwrap(),
7017 &[
7018 (
7019 0..0,
7020 "zero\n",
7021 "",
7022 DiffHunkStatus::deleted(HasSecondaryHunk),
7023 ),
7024 (
7025 1..2,
7026 "two\n",
7027 "TWO\n",
7028 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7029 ),
7030 (
7031 3..4,
7032 "four\n",
7033 "FOUR\n",
7034 DiffHunkStatus::modified(HasSecondaryHunk),
7035 ),
7036 ],
7037 );
7038 });
7039
7040 // The diff emits a change event for the range of the staged hunk.
7041 assert!(matches!(
7042 diff_events.next().await.unwrap(),
7043 BufferDiffEvent::HunksStagedOrUnstaged(_)
7044 ));
7045 let event = diff_events.next().await.unwrap();
7046 if let BufferDiffEvent::DiffChanged {
7047 changed_range: Some(changed_range),
7048 } = event
7049 {
7050 let changed_range = changed_range.to_point(&snapshot);
7051 assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
7052 } else {
7053 panic!("Unexpected event {event:?}");
7054 }
7055
7056 // When the write to the index completes, it appears as staged.
7057 cx.run_until_parked();
7058 uncommitted_diff.update(cx, |diff, cx| {
7059 assert_hunks(
7060 diff.hunks(&snapshot, cx),
7061 &snapshot,
7062 &diff.base_text_string().unwrap(),
7063 &[
7064 (
7065 0..0,
7066 "zero\n",
7067 "",
7068 DiffHunkStatus::deleted(HasSecondaryHunk),
7069 ),
7070 (
7071 1..2,
7072 "two\n",
7073 "TWO\n",
7074 DiffHunkStatus::modified(NoSecondaryHunk),
7075 ),
7076 (
7077 3..4,
7078 "four\n",
7079 "FOUR\n",
7080 DiffHunkStatus::modified(HasSecondaryHunk),
7081 ),
7082 ],
7083 );
7084 });
7085
7086 // The diff emits a change event for the changed index text.
7087 let event = diff_events.next().await.unwrap();
7088 if let BufferDiffEvent::DiffChanged {
7089 changed_range: Some(changed_range),
7090 } = event
7091 {
7092 let changed_range = changed_range.to_point(&snapshot);
7093 assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
7094 } else {
7095 panic!("Unexpected event {event:?}");
7096 }
7097
7098 // Simulate a problem writing to the git index.
7099 fs.set_error_message_for_index_write(
7100 "/dir/.git".as_ref(),
7101 Some("failed to write git index".into()),
7102 );
7103
7104 // Stage another hunk.
7105 uncommitted_diff.update(cx, |diff, cx| {
7106 let range =
7107 snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
7108 let hunks = diff
7109 .hunks_intersecting_range(range, &snapshot, cx)
7110 .collect::<Vec<_>>();
7111 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
7112
7113 assert_hunks(
7114 diff.hunks(&snapshot, cx),
7115 &snapshot,
7116 &diff.base_text_string().unwrap(),
7117 &[
7118 (
7119 0..0,
7120 "zero\n",
7121 "",
7122 DiffHunkStatus::deleted(HasSecondaryHunk),
7123 ),
7124 (
7125 1..2,
7126 "two\n",
7127 "TWO\n",
7128 DiffHunkStatus::modified(NoSecondaryHunk),
7129 ),
7130 (
7131 3..4,
7132 "four\n",
7133 "FOUR\n",
7134 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7135 ),
7136 ],
7137 );
7138 });
7139 assert!(matches!(
7140 diff_events.next().await.unwrap(),
7141 BufferDiffEvent::HunksStagedOrUnstaged(_)
7142 ));
7143 let event = diff_events.next().await.unwrap();
7144 if let BufferDiffEvent::DiffChanged {
7145 changed_range: Some(changed_range),
7146 } = event
7147 {
7148 let changed_range = changed_range.to_point(&snapshot);
7149 assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
7150 } else {
7151 panic!("Unexpected event {event:?}");
7152 }
7153
7154 // When the write fails, the hunk returns to being unstaged.
7155 cx.run_until_parked();
7156 uncommitted_diff.update(cx, |diff, cx| {
7157 assert_hunks(
7158 diff.hunks(&snapshot, cx),
7159 &snapshot,
7160 &diff.base_text_string().unwrap(),
7161 &[
7162 (
7163 0..0,
7164 "zero\n",
7165 "",
7166 DiffHunkStatus::deleted(HasSecondaryHunk),
7167 ),
7168 (
7169 1..2,
7170 "two\n",
7171 "TWO\n",
7172 DiffHunkStatus::modified(NoSecondaryHunk),
7173 ),
7174 (
7175 3..4,
7176 "four\n",
7177 "FOUR\n",
7178 DiffHunkStatus::modified(HasSecondaryHunk),
7179 ),
7180 ],
7181 );
7182 });
7183
7184 let event = diff_events.next().await.unwrap();
7185 if let BufferDiffEvent::DiffChanged {
7186 changed_range: Some(changed_range),
7187 } = event
7188 {
7189 let changed_range = changed_range.to_point(&snapshot);
7190 assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
7191 } else {
7192 panic!("Unexpected event {event:?}");
7193 }
7194
7195 // Allow writing to the git index to succeed again.
7196 fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);
7197
7198 // Stage two hunks with separate operations.
7199 uncommitted_diff.update(cx, |diff, cx| {
7200 let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
7201 diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
7202 diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
7203 });
7204
7205 // Both staged hunks appear as pending.
7206 uncommitted_diff.update(cx, |diff, cx| {
7207 assert_hunks(
7208 diff.hunks(&snapshot, cx),
7209 &snapshot,
7210 &diff.base_text_string().unwrap(),
7211 &[
7212 (
7213 0..0,
7214 "zero\n",
7215 "",
7216 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7217 ),
7218 (
7219 1..2,
7220 "two\n",
7221 "TWO\n",
7222 DiffHunkStatus::modified(NoSecondaryHunk),
7223 ),
7224 (
7225 3..4,
7226 "four\n",
7227 "FOUR\n",
7228 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7229 ),
7230 ],
7231 );
7232 });
7233
7234 // Both staging operations take effect.
7235 cx.run_until_parked();
7236 uncommitted_diff.update(cx, |diff, cx| {
7237 assert_hunks(
7238 diff.hunks(&snapshot, cx),
7239 &snapshot,
7240 &diff.base_text_string().unwrap(),
7241 &[
7242 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
7243 (
7244 1..2,
7245 "two\n",
7246 "TWO\n",
7247 DiffHunkStatus::modified(NoSecondaryHunk),
7248 ),
7249 (
7250 3..4,
7251 "four\n",
7252 "FOUR\n",
7253 DiffHunkStatus::modified(NoSecondaryHunk),
7254 ),
7255 ],
7256 );
7257 });
7258}
7259
7260#[gpui::test(seeds(340, 472))]
7261async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
7262 use DiffHunkSecondaryStatus::*;
7263 init_test(cx);
7264
7265 let committed_contents = r#"
7266 zero
7267 one
7268 two
7269 three
7270 four
7271 five
7272 "#
7273 .unindent();
7274 let file_contents = r#"
7275 one
7276 TWO
7277 three
7278 FOUR
7279 five
7280 "#
7281 .unindent();
7282
7283 let fs = FakeFs::new(cx.background_executor.clone());
7284 fs.insert_tree(
7285 "/dir",
7286 json!({
7287 ".git": {},
7288 "file.txt": file_contents.clone()
7289 }),
7290 )
7291 .await;
7292
7293 fs.set_head_for_repo(
7294 "/dir/.git".as_ref(),
7295 &[("file.txt".into(), committed_contents.clone())],
7296 "deadbeef",
7297 );
7298 fs.set_index_for_repo(
7299 "/dir/.git".as_ref(),
7300 &[("file.txt".into(), committed_contents.clone())],
7301 );
7302
7303 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7304
7305 let buffer = project
7306 .update(cx, |project, cx| {
7307 project.open_local_buffer("/dir/file.txt", cx)
7308 })
7309 .await
7310 .unwrap();
7311 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7312 let uncommitted_diff = project
7313 .update(cx, |project, cx| {
7314 project.open_uncommitted_diff(buffer.clone(), cx)
7315 })
7316 .await
7317 .unwrap();
7318
7319 // The hunks are initially unstaged.
7320 uncommitted_diff.read_with(cx, |diff, cx| {
7321 assert_hunks(
7322 diff.hunks(&snapshot, cx),
7323 &snapshot,
7324 &diff.base_text_string().unwrap(),
7325 &[
7326 (
7327 0..0,
7328 "zero\n",
7329 "",
7330 DiffHunkStatus::deleted(HasSecondaryHunk),
7331 ),
7332 (
7333 1..2,
7334 "two\n",
7335 "TWO\n",
7336 DiffHunkStatus::modified(HasSecondaryHunk),
7337 ),
7338 (
7339 3..4,
7340 "four\n",
7341 "FOUR\n",
7342 DiffHunkStatus::modified(HasSecondaryHunk),
7343 ),
7344 ],
7345 );
7346 });
7347
7348 // Pause IO events
7349 fs.pause_events();
7350
7351 // Stage the first hunk.
7352 uncommitted_diff.update(cx, |diff, cx| {
7353 let hunk = diff.hunks(&snapshot, cx).next().unwrap();
7354 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
7355 assert_hunks(
7356 diff.hunks(&snapshot, cx),
7357 &snapshot,
7358 &diff.base_text_string().unwrap(),
7359 &[
7360 (
7361 0..0,
7362 "zero\n",
7363 "",
7364 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7365 ),
7366 (
7367 1..2,
7368 "two\n",
7369 "TWO\n",
7370 DiffHunkStatus::modified(HasSecondaryHunk),
7371 ),
7372 (
7373 3..4,
7374 "four\n",
7375 "FOUR\n",
7376 DiffHunkStatus::modified(HasSecondaryHunk),
7377 ),
7378 ],
7379 );
7380 });
7381
7382 // Stage the second hunk *before* receiving the FS event for the first hunk.
7383 cx.run_until_parked();
7384 uncommitted_diff.update(cx, |diff, cx| {
7385 let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
7386 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
7387 assert_hunks(
7388 diff.hunks(&snapshot, cx),
7389 &snapshot,
7390 &diff.base_text_string().unwrap(),
7391 &[
7392 (
7393 0..0,
7394 "zero\n",
7395 "",
7396 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7397 ),
7398 (
7399 1..2,
7400 "two\n",
7401 "TWO\n",
7402 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7403 ),
7404 (
7405 3..4,
7406 "four\n",
7407 "FOUR\n",
7408 DiffHunkStatus::modified(HasSecondaryHunk),
7409 ),
7410 ],
7411 );
7412 });
7413
7414 // Process the FS event for staging the first hunk (second event is still pending).
7415 fs.flush_events(1);
7416 cx.run_until_parked();
7417
7418 // Stage the third hunk before receiving the second FS event.
7419 uncommitted_diff.update(cx, |diff, cx| {
7420 let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
7421 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
7422 });
7423
7424 // Wait for all remaining IO.
7425 cx.run_until_parked();
7426 fs.flush_events(fs.buffered_event_count());
7427
7428 // Now all hunks are staged.
7429 cx.run_until_parked();
7430 uncommitted_diff.update(cx, |diff, cx| {
7431 assert_hunks(
7432 diff.hunks(&snapshot, cx),
7433 &snapshot,
7434 &diff.base_text_string().unwrap(),
7435 &[
7436 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
7437 (
7438 1..2,
7439 "two\n",
7440 "TWO\n",
7441 DiffHunkStatus::modified(NoSecondaryHunk),
7442 ),
7443 (
7444 3..4,
7445 "four\n",
7446 "FOUR\n",
7447 DiffHunkStatus::modified(NoSecondaryHunk),
7448 ),
7449 ],
7450 );
7451 });
7452}
7453
7454#[gpui::test(iterations = 25)]
7455async fn test_staging_random_hunks(
7456 mut rng: StdRng,
7457 executor: BackgroundExecutor,
7458 cx: &mut gpui::TestAppContext,
7459) {
7460 let operations = env::var("OPERATIONS")
7461 .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
7462 .unwrap_or(20);
7463
7464 // Try to induce races between diff recalculation and index writes.
7465 if rng.gen_bool(0.5) {
7466 executor.deprioritize(*CALCULATE_DIFF_TASK);
7467 }
7468
7469 use DiffHunkSecondaryStatus::*;
7470 init_test(cx);
7471
7472 let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
7473 let index_text = committed_text.clone();
7474 let buffer_text = (0..30)
7475 .map(|i| match i % 5 {
7476 0 => format!("line {i} (modified)\n"),
7477 _ => format!("line {i}\n"),
7478 })
7479 .collect::<String>();
7480
7481 let fs = FakeFs::new(cx.background_executor.clone());
7482 fs.insert_tree(
7483 path!("/dir"),
7484 json!({
7485 ".git": {},
7486 "file.txt": buffer_text.clone()
7487 }),
7488 )
7489 .await;
7490 fs.set_head_for_repo(
7491 path!("/dir/.git").as_ref(),
7492 &[("file.txt".into(), committed_text.clone())],
7493 "deadbeef",
7494 );
7495 fs.set_index_for_repo(
7496 path!("/dir/.git").as_ref(),
7497 &[("file.txt".into(), index_text.clone())],
7498 );
7499 let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();
7500
7501 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
7502 let buffer = project
7503 .update(cx, |project, cx| {
7504 project.open_local_buffer(path!("/dir/file.txt"), cx)
7505 })
7506 .await
7507 .unwrap();
7508 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7509 let uncommitted_diff = project
7510 .update(cx, |project, cx| {
7511 project.open_uncommitted_diff(buffer.clone(), cx)
7512 })
7513 .await
7514 .unwrap();
7515
7516 let mut hunks =
7517 uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
7518 assert_eq!(hunks.len(), 6);
7519
7520 for _i in 0..operations {
7521 let hunk_ix = rng.gen_range(0..hunks.len());
7522 let hunk = &mut hunks[hunk_ix];
7523 let row = hunk.range.start.row;
7524
7525 if hunk.status().has_secondary_hunk() {
7526 log::info!("staging hunk at {row}");
7527 uncommitted_diff.update(cx, |diff, cx| {
7528 diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
7529 });
7530 hunk.secondary_status = SecondaryHunkRemovalPending;
7531 } else {
7532 log::info!("unstaging hunk at {row}");
7533 uncommitted_diff.update(cx, |diff, cx| {
7534 diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
7535 });
7536 hunk.secondary_status = SecondaryHunkAdditionPending;
7537 }
7538
7539 for _ in 0..rng.gen_range(0..10) {
7540 log::info!("yielding");
7541 cx.executor().simulate_random_delay().await;
7542 }
7543 }
7544
7545 cx.executor().run_until_parked();
7546
7547 for hunk in &mut hunks {
7548 if hunk.secondary_status == SecondaryHunkRemovalPending {
7549 hunk.secondary_status = NoSecondaryHunk;
7550 } else if hunk.secondary_status == SecondaryHunkAdditionPending {
7551 hunk.secondary_status = HasSecondaryHunk;
7552 }
7553 }
7554
7555 log::info!(
7556 "index text:\n{}",
7557 repo.load_index_text("file.txt".into()).await.unwrap()
7558 );
7559
7560 uncommitted_diff.update(cx, |diff, cx| {
7561 let expected_hunks = hunks
7562 .iter()
7563 .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
7564 .collect::<Vec<_>>();
7565 let actual_hunks = diff
7566 .hunks(&snapshot, cx)
7567 .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
7568 .collect::<Vec<_>>();
7569 assert_eq!(actual_hunks, expected_hunks);
7570 });
7571}
7572
7573#[gpui::test]
7574async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7575 init_test(cx);
7576
7577 let committed_contents = r#"
7578 fn main() {
7579 println!("hello from HEAD");
7580 }
7581 "#
7582 .unindent();
7583 let file_contents = r#"
7584 fn main() {
7585 println!("hello from the working copy");
7586 }
7587 "#
7588 .unindent();
7589
7590 let fs = FakeFs::new(cx.background_executor.clone());
7591 fs.insert_tree(
7592 "/dir",
7593 json!({
7594 ".git": {},
7595 "src": {
7596 "main.rs": file_contents,
7597 }
7598 }),
7599 )
7600 .await;
7601
7602 fs.set_head_for_repo(
7603 Path::new("/dir/.git"),
7604 &[("src/main.rs".into(), committed_contents.clone())],
7605 "deadbeef",
7606 );
7607 fs.set_index_for_repo(
7608 Path::new("/dir/.git"),
7609 &[("src/main.rs".into(), committed_contents.clone())],
7610 );
7611
7612 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7613
7614 let buffer = project
7615 .update(cx, |project, cx| {
7616 project.open_local_buffer("/dir/src/main.rs", cx)
7617 })
7618 .await
7619 .unwrap();
7620 let uncommitted_diff = project
7621 .update(cx, |project, cx| {
7622 project.open_uncommitted_diff(buffer.clone(), cx)
7623 })
7624 .await
7625 .unwrap();
7626
7627 cx.run_until_parked();
7628 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7629 let snapshot = buffer.read(cx).snapshot();
7630 assert_hunks(
7631 uncommitted_diff.hunks(&snapshot, cx),
7632 &snapshot,
7633 &uncommitted_diff.base_text_string().unwrap(),
7634 &[(
7635 1..2,
7636 " println!(\"hello from HEAD\");\n",
7637 " println!(\"hello from the working copy\");\n",
7638 DiffHunkStatus {
7639 kind: DiffHunkStatusKind::Modified,
7640 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7641 },
7642 )],
7643 );
7644 });
7645}
7646
7647#[gpui::test]
7648async fn test_repository_and_path_for_project_path(
7649 background_executor: BackgroundExecutor,
7650 cx: &mut gpui::TestAppContext,
7651) {
7652 init_test(cx);
7653 let fs = FakeFs::new(background_executor);
7654 fs.insert_tree(
7655 path!("/root"),
7656 json!({
7657 "c.txt": "",
7658 "dir1": {
7659 ".git": {},
7660 "deps": {
7661 "dep1": {
7662 ".git": {},
7663 "src": {
7664 "a.txt": ""
7665 }
7666 }
7667 },
7668 "src": {
7669 "b.txt": ""
7670 }
7671 },
7672 }),
7673 )
7674 .await;
7675
7676 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7677 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7678 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7679 project
7680 .update(cx, |project, cx| project.git_scans_complete(cx))
7681 .await;
7682 cx.run_until_parked();
7683
7684 project.read_with(cx, |project, cx| {
7685 let git_store = project.git_store().read(cx);
7686 let pairs = [
7687 ("c.txt", None),
7688 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
7689 (
7690 "dir1/deps/dep1/src/a.txt",
7691 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
7692 ),
7693 ];
7694 let expected = pairs
7695 .iter()
7696 .map(|(path, result)| {
7697 (
7698 path,
7699 result.map(|(repo, repo_path)| {
7700 (Path::new(repo).into(), RepoPath::from(repo_path))
7701 }),
7702 )
7703 })
7704 .collect::<Vec<_>>();
7705 let actual = pairs
7706 .iter()
7707 .map(|(path, _)| {
7708 let project_path = (tree_id, Path::new(path)).into();
7709 let result = maybe!({
7710 let (repo, repo_path) =
7711 git_store.repository_and_path_for_project_path(&project_path, cx)?;
7712 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
7713 });
7714 (path, result)
7715 })
7716 .collect::<Vec<_>>();
7717 pretty_assertions::assert_eq!(expected, actual);
7718 });
7719
7720 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
7721 .await
7722 .unwrap();
7723 cx.run_until_parked();
7724
7725 project.read_with(cx, |project, cx| {
7726 let git_store = project.git_store().read(cx);
7727 assert_eq!(
7728 git_store.repository_and_path_for_project_path(
7729 &(tree_id, Path::new("dir1/src/b.txt")).into(),
7730 cx
7731 ),
7732 None
7733 );
7734 });
7735}
7736
7737#[gpui::test]
7738async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7739 init_test(cx);
7740 let fs = FakeFs::new(cx.background_executor.clone());
7741 fs.insert_tree(
7742 path!("/root"),
7743 json!({
7744 "home": {
7745 ".git": {},
7746 "project": {
7747 "a.txt": "A"
7748 },
7749 },
7750 }),
7751 )
7752 .await;
7753 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7754
7755 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7756 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7757 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7758
7759 project
7760 .update(cx, |project, cx| project.git_scans_complete(cx))
7761 .await;
7762 tree.flush_fs_events(cx).await;
7763
7764 project.read_with(cx, |project, cx| {
7765 let containing = project
7766 .git_store()
7767 .read(cx)
7768 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7769 assert!(containing.is_none());
7770 });
7771
7772 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7773 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7774 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7775 project
7776 .update(cx, |project, cx| project.git_scans_complete(cx))
7777 .await;
7778 tree.flush_fs_events(cx).await;
7779
7780 project.read_with(cx, |project, cx| {
7781 let containing = project
7782 .git_store()
7783 .read(cx)
7784 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7785 assert_eq!(
7786 containing
7787 .unwrap()
7788 .0
7789 .read(cx)
7790 .work_directory_abs_path
7791 .as_ref(),
7792 Path::new(path!("/root/home"))
7793 );
7794 });
7795}
7796
7797#[gpui::test]
7798async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
7799 init_test(cx);
7800 cx.executor().allow_parking();
7801
7802 let root = TempTree::new(json!({
7803 "project": {
7804 "a.txt": "a", // Modified
7805 "b.txt": "bb", // Added
7806 "c.txt": "ccc", // Unchanged
7807 "d.txt": "dddd", // Deleted
7808 },
7809 }));
7810
7811 // Set up git repository before creating the project.
7812 let work_dir = root.path().join("project");
7813 let repo = git_init(work_dir.as_path());
7814 git_add("a.txt", &repo);
7815 git_add("c.txt", &repo);
7816 git_add("d.txt", &repo);
7817 git_commit("Initial commit", &repo);
7818 std::fs::remove_file(work_dir.join("d.txt")).unwrap();
7819 std::fs::write(work_dir.join("a.txt"), "aa").unwrap();
7820
7821 let project = Project::test(
7822 Arc::new(RealFs::new(None, cx.executor())),
7823 [root.path()],
7824 cx,
7825 )
7826 .await;
7827
7828 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7829 tree.flush_fs_events(cx).await;
7830 project
7831 .update(cx, |project, cx| project.git_scans_complete(cx))
7832 .await;
7833 cx.executor().run_until_parked();
7834
7835 let repository = project.read_with(cx, |project, cx| {
7836 project.repositories(cx).values().next().unwrap().clone()
7837 });
7838
7839 // Check that the right git state is observed on startup
7840 repository.read_with(cx, |repository, _| {
7841 let entries = repository.cached_status().collect::<Vec<_>>();
7842 assert_eq!(
7843 entries,
7844 [
7845 StatusEntry {
7846 repo_path: "a.txt".into(),
7847 status: StatusCode::Modified.worktree(),
7848 },
7849 StatusEntry {
7850 repo_path: "b.txt".into(),
7851 status: FileStatus::Untracked,
7852 },
7853 StatusEntry {
7854 repo_path: "d.txt".into(),
7855 status: StatusCode::Deleted.worktree(),
7856 },
7857 ]
7858 );
7859 });
7860
7861 std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();
7862
7863 tree.flush_fs_events(cx).await;
7864 project
7865 .update(cx, |project, cx| project.git_scans_complete(cx))
7866 .await;
7867 cx.executor().run_until_parked();
7868
7869 repository.read_with(cx, |repository, _| {
7870 let entries = repository.cached_status().collect::<Vec<_>>();
7871 assert_eq!(
7872 entries,
7873 [
7874 StatusEntry {
7875 repo_path: "a.txt".into(),
7876 status: StatusCode::Modified.worktree(),
7877 },
7878 StatusEntry {
7879 repo_path: "b.txt".into(),
7880 status: FileStatus::Untracked,
7881 },
7882 StatusEntry {
7883 repo_path: "c.txt".into(),
7884 status: StatusCode::Modified.worktree(),
7885 },
7886 StatusEntry {
7887 repo_path: "d.txt".into(),
7888 status: StatusCode::Deleted.worktree(),
7889 },
7890 ]
7891 );
7892 });
7893
7894 git_add("a.txt", &repo);
7895 git_add("c.txt", &repo);
7896 git_remove_index(Path::new("d.txt"), &repo);
7897 git_commit("Another commit", &repo);
7898 tree.flush_fs_events(cx).await;
7899 project
7900 .update(cx, |project, cx| project.git_scans_complete(cx))
7901 .await;
7902 cx.executor().run_until_parked();
7903
7904 std::fs::remove_file(work_dir.join("a.txt")).unwrap();
7905 std::fs::remove_file(work_dir.join("b.txt")).unwrap();
7906 tree.flush_fs_events(cx).await;
7907 project
7908 .update(cx, |project, cx| project.git_scans_complete(cx))
7909 .await;
7910 cx.executor().run_until_parked();
7911
7912 repository.read_with(cx, |repository, _cx| {
7913 let entries = repository.cached_status().collect::<Vec<_>>();
7914
7915 // Deleting an untracked entry, b.txt, should leave no status
7916 // a.txt was tracked, and so should have a status
7917 assert_eq!(
7918 entries,
7919 [StatusEntry {
7920 repo_path: "a.txt".into(),
7921 status: StatusCode::Deleted.worktree(),
7922 }]
7923 );
7924 });
7925}
7926
7927#[gpui::test]
7928async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
7929 init_test(cx);
7930 cx.executor().allow_parking();
7931
7932 let root = TempTree::new(json!({
7933 "project": {
7934 "sub": {},
7935 "a.txt": "",
7936 },
7937 }));
7938
7939 let work_dir = root.path().join("project");
7940 let repo = git_init(work_dir.as_path());
7941 // a.txt exists in HEAD and the working copy but is deleted in the index.
7942 git_add("a.txt", &repo);
7943 git_commit("Initial commit", &repo);
7944 git_remove_index("a.txt".as_ref(), &repo);
7945 // `sub` is a nested git repository.
7946 let _sub = git_init(&work_dir.join("sub"));
7947
7948 let project = Project::test(
7949 Arc::new(RealFs::new(None, cx.executor())),
7950 [root.path()],
7951 cx,
7952 )
7953 .await;
7954
7955 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7956 tree.flush_fs_events(cx).await;
7957 project
7958 .update(cx, |project, cx| project.git_scans_complete(cx))
7959 .await;
7960 cx.executor().run_until_parked();
7961
7962 let repository = project.read_with(cx, |project, cx| {
7963 project
7964 .repositories(cx)
7965 .values()
7966 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
7967 .unwrap()
7968 .clone()
7969 });
7970
7971 repository.read_with(cx, |repository, _cx| {
7972 let entries = repository.cached_status().collect::<Vec<_>>();
7973
7974 // `sub` doesn't appear in our computed statuses.
7975 // a.txt appears with a combined `DA` status.
7976 assert_eq!(
7977 entries,
7978 [StatusEntry {
7979 repo_path: "a.txt".into(),
7980 status: TrackedStatus {
7981 index_status: StatusCode::Deleted,
7982 worktree_status: StatusCode::Added
7983 }
7984 .into(),
7985 }]
7986 )
7987 });
7988}
7989
7990#[gpui::test]
7991async fn test_repository_subfolder_git_status(
7992 executor: gpui::BackgroundExecutor,
7993 cx: &mut gpui::TestAppContext,
7994) {
7995 init_test(cx);
7996
7997 let fs = FakeFs::new(executor);
7998 fs.insert_tree(
7999 path!("/root"),
8000 json!({
8001 "my-repo": {
8002 ".git": {},
8003 "a.txt": "a",
8004 "sub-folder-1": {
8005 "sub-folder-2": {
8006 "c.txt": "cc",
8007 "d": {
8008 "e.txt": "eee"
8009 }
8010 },
8011 }
8012 },
8013 }),
8014 )
8015 .await;
8016
8017 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
8018 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
8019
8020 fs.set_status_for_repo(
8021 path!("/root/my-repo/.git").as_ref(),
8022 &[(E_TXT.as_ref(), FileStatus::Untracked)],
8023 );
8024
8025 let project = Project::test(
8026 fs.clone(),
8027 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
8028 cx,
8029 )
8030 .await;
8031
8032 project
8033 .update(cx, |project, cx| project.git_scans_complete(cx))
8034 .await;
8035 cx.run_until_parked();
8036
8037 let repository = project.read_with(cx, |project, cx| {
8038 project.repositories(cx).values().next().unwrap().clone()
8039 });
8040
8041 // Ensure that the git status is loaded correctly
8042 repository.read_with(cx, |repository, _cx| {
8043 assert_eq!(
8044 repository.work_directory_abs_path,
8045 Path::new(path!("/root/my-repo")).into()
8046 );
8047
8048 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
8049 assert_eq!(
8050 repository.status_for_path(&E_TXT.into()).unwrap().status,
8051 FileStatus::Untracked
8052 );
8053 });
8054
8055 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
8056 project
8057 .update(cx, |project, cx| project.git_scans_complete(cx))
8058 .await;
8059 cx.run_until_parked();
8060
8061 repository.read_with(cx, |repository, _cx| {
8062 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
8063 assert_eq!(repository.status_for_path(&E_TXT.into()), None);
8064 });
8065}
8066
8067// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
8068#[cfg(any())]
8069#[gpui::test]
8070async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
8071 init_test(cx);
8072 cx.executor().allow_parking();
8073
8074 let root = TempTree::new(json!({
8075 "project": {
8076 "a.txt": "a",
8077 },
8078 }));
8079 let root_path = root.path();
8080
8081 let repo = git_init(&root_path.join("project"));
8082 git_add("a.txt", &repo);
8083 git_commit("init", &repo);
8084
8085 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
8086
8087 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8088 tree.flush_fs_events(cx).await;
8089 project
8090 .update(cx, |project, cx| project.git_scans_complete(cx))
8091 .await;
8092 cx.executor().run_until_parked();
8093
8094 let repository = project.read_with(cx, |project, cx| {
8095 project.repositories(cx).values().next().unwrap().clone()
8096 });
8097
8098 git_branch("other-branch", &repo);
8099 git_checkout("refs/heads/other-branch", &repo);
8100 std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
8101 git_add("a.txt", &repo);
8102 git_commit("capitalize", &repo);
8103 let commit = repo
8104 .head()
8105 .expect("Failed to get HEAD")
8106 .peel_to_commit()
8107 .expect("HEAD is not a commit");
8108 git_checkout("refs/heads/main", &repo);
8109 std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
8110 git_add("a.txt", &repo);
8111 git_commit("improve letter", &repo);
8112 git_cherry_pick(&commit, &repo);
8113 std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
8114 .expect("No CHERRY_PICK_HEAD");
8115 pretty_assertions::assert_eq!(
8116 git_status(&repo),
8117 collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
8118 );
8119 tree.flush_fs_events(cx).await;
8120 project
8121 .update(cx, |project, cx| project.git_scans_complete(cx))
8122 .await;
8123 cx.executor().run_until_parked();
8124 let conflicts = repository.update(cx, |repository, _| {
8125 repository
8126 .merge_conflicts
8127 .iter()
8128 .cloned()
8129 .collect::<Vec<_>>()
8130 });
8131 pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);
8132
8133 git_add("a.txt", &repo);
8134 // Attempt to manually simulate what `git cherry-pick --continue` would do.
8135 git_commit("whatevs", &repo);
8136 std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
8137 .expect("Failed to remove CHERRY_PICK_HEAD");
8138 pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
8139 tree.flush_fs_events(cx).await;
8140 let conflicts = repository.update(cx, |repository, _| {
8141 repository
8142 .merge_conflicts
8143 .iter()
8144 .cloned()
8145 .collect::<Vec<_>>()
8146 });
8147 pretty_assertions::assert_eq!(conflicts, []);
8148}
8149
8150#[gpui::test]
8151async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
8152 init_test(cx);
8153 let fs = FakeFs::new(cx.background_executor.clone());
8154 fs.insert_tree(
8155 path!("/root"),
8156 json!({
8157 ".git": {},
8158 ".gitignore": "*.txt\n",
8159 "a.xml": "<a></a>",
8160 "b.txt": "Some text"
8161 }),
8162 )
8163 .await;
8164
8165 fs.set_head_and_index_for_repo(
8166 path!("/root/.git").as_ref(),
8167 &[
8168 (".gitignore".into(), "*.txt\n".into()),
8169 ("a.xml".into(), "<a></a>".into()),
8170 ],
8171 );
8172
8173 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
8174
8175 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8176 tree.flush_fs_events(cx).await;
8177 project
8178 .update(cx, |project, cx| project.git_scans_complete(cx))
8179 .await;
8180 cx.executor().run_until_parked();
8181
8182 let repository = project.read_with(cx, |project, cx| {
8183 project.repositories(cx).values().next().unwrap().clone()
8184 });
8185
8186 // One file is unmodified, the other is ignored.
8187 cx.read(|cx| {
8188 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
8189 assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
8190 });
8191
8192 // Change the gitignore, and stage the newly non-ignored file.
8193 fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
8194 .await
8195 .unwrap();
8196 fs.set_index_for_repo(
8197 Path::new(path!("/root/.git")),
8198 &[
8199 (".gitignore".into(), "*.txt\n".into()),
8200 ("a.xml".into(), "<a></a>".into()),
8201 ("b.txt".into(), "Some text".into()),
8202 ],
8203 );
8204
8205 cx.executor().run_until_parked();
8206 cx.read(|cx| {
8207 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
8208 assert_entry_git_state(
8209 tree.read(cx),
8210 repository.read(cx),
8211 "b.txt",
8212 Some(StatusCode::Added),
8213 false,
8214 );
8215 });
8216}
8217
8218// NOTE:
8219// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
8220// a directory which some program has already open.
8221// This is a limitation of the Windows.
8222// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
8223#[gpui::test]
8224#[cfg_attr(target_os = "windows", ignore)]
8225async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
8226 init_test(cx);
8227 cx.executor().allow_parking();
8228 let root = TempTree::new(json!({
8229 "projects": {
8230 "project1": {
8231 "a": "",
8232 "b": "",
8233 }
8234 },
8235
8236 }));
8237 let root_path = root.path();
8238
8239 let repo = git_init(&root_path.join("projects/project1"));
8240 git_add("a", &repo);
8241 git_commit("init", &repo);
8242 std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();
8243
8244 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
8245
8246 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8247 tree.flush_fs_events(cx).await;
8248 project
8249 .update(cx, |project, cx| project.git_scans_complete(cx))
8250 .await;
8251 cx.executor().run_until_parked();
8252
8253 let repository = project.read_with(cx, |project, cx| {
8254 project.repositories(cx).values().next().unwrap().clone()
8255 });
8256
8257 repository.read_with(cx, |repository, _| {
8258 assert_eq!(
8259 repository.work_directory_abs_path.as_ref(),
8260 root_path.join("projects/project1").as_path()
8261 );
8262 assert_eq!(
8263 repository
8264 .status_for_path(&"a".into())
8265 .map(|entry| entry.status),
8266 Some(StatusCode::Modified.worktree()),
8267 );
8268 assert_eq!(
8269 repository
8270 .status_for_path(&"b".into())
8271 .map(|entry| entry.status),
8272 Some(FileStatus::Untracked),
8273 );
8274 });
8275
8276 std::fs::rename(
8277 root_path.join("projects/project1"),
8278 root_path.join("projects/project2"),
8279 )
8280 .unwrap();
8281 tree.flush_fs_events(cx).await;
8282
8283 repository.read_with(cx, |repository, _| {
8284 assert_eq!(
8285 repository.work_directory_abs_path.as_ref(),
8286 root_path.join("projects/project2").as_path()
8287 );
8288 assert_eq!(
8289 repository.status_for_path(&"a".into()).unwrap().status,
8290 StatusCode::Modified.worktree(),
8291 );
8292 assert_eq!(
8293 repository.status_for_path(&"b".into()).unwrap().status,
8294 FileStatus::Untracked,
8295 );
8296 });
8297}
8298
8299// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
8300// you can't rename a directory which some program has already open. This is a
8301// limitation of the Windows. See:
8302// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
8303#[gpui::test]
8304#[cfg_attr(target_os = "windows", ignore)]
8305async fn test_file_status(cx: &mut gpui::TestAppContext) {
8306 init_test(cx);
8307 cx.executor().allow_parking();
8308 const IGNORE_RULE: &str = "**/target";
8309
8310 let root = TempTree::new(json!({
8311 "project": {
8312 "a.txt": "a",
8313 "b.txt": "bb",
8314 "c": {
8315 "d": {
8316 "e.txt": "eee"
8317 }
8318 },
8319 "f.txt": "ffff",
8320 "target": {
8321 "build_file": "???"
8322 },
8323 ".gitignore": IGNORE_RULE
8324 },
8325
8326 }));
8327 let root_path = root.path();
8328
8329 const A_TXT: &str = "a.txt";
8330 const B_TXT: &str = "b.txt";
8331 const E_TXT: &str = "c/d/e.txt";
8332 const F_TXT: &str = "f.txt";
8333 const DOTGITIGNORE: &str = ".gitignore";
8334 const BUILD_FILE: &str = "target/build_file";
8335
8336 // Set up git repository before creating the worktree.
8337 let work_dir = root.path().join("project");
8338 let mut repo = git_init(work_dir.as_path());
8339 repo.add_ignore_rule(IGNORE_RULE).unwrap();
8340 git_add(A_TXT, &repo);
8341 git_add(E_TXT, &repo);
8342 git_add(DOTGITIGNORE, &repo);
8343 git_commit("Initial commit", &repo);
8344
8345 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
8346
8347 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8348 tree.flush_fs_events(cx).await;
8349 project
8350 .update(cx, |project, cx| project.git_scans_complete(cx))
8351 .await;
8352 cx.executor().run_until_parked();
8353
8354 let repository = project.read_with(cx, |project, cx| {
8355 project.repositories(cx).values().next().unwrap().clone()
8356 });
8357
8358 // Check that the right git state is observed on startup
8359 repository.read_with(cx, |repository, _cx| {
8360 assert_eq!(
8361 repository.work_directory_abs_path.as_ref(),
8362 root_path.join("project").as_path()
8363 );
8364
8365 assert_eq!(
8366 repository.status_for_path(&B_TXT.into()).unwrap().status,
8367 FileStatus::Untracked,
8368 );
8369 assert_eq!(
8370 repository.status_for_path(&F_TXT.into()).unwrap().status,
8371 FileStatus::Untracked,
8372 );
8373 });
8374
8375 // Modify a file in the working copy.
8376 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
8377 tree.flush_fs_events(cx).await;
8378 project
8379 .update(cx, |project, cx| project.git_scans_complete(cx))
8380 .await;
8381 cx.executor().run_until_parked();
8382
8383 // The worktree detects that the file's git status has changed.
8384 repository.read_with(cx, |repository, _| {
8385 assert_eq!(
8386 repository.status_for_path(&A_TXT.into()).unwrap().status,
8387 StatusCode::Modified.worktree(),
8388 );
8389 });
8390
8391 // Create a commit in the git repository.
8392 git_add(A_TXT, &repo);
8393 git_add(B_TXT, &repo);
8394 git_commit("Committing modified and added", &repo);
8395 tree.flush_fs_events(cx).await;
8396 project
8397 .update(cx, |project, cx| project.git_scans_complete(cx))
8398 .await;
8399 cx.executor().run_until_parked();
8400
8401 // The worktree detects that the files' git status have changed.
8402 repository.read_with(cx, |repository, _cx| {
8403 assert_eq!(
8404 repository.status_for_path(&F_TXT.into()).unwrap().status,
8405 FileStatus::Untracked,
8406 );
8407 assert_eq!(repository.status_for_path(&B_TXT.into()), None);
8408 assert_eq!(repository.status_for_path(&A_TXT.into()), None);
8409 });
8410
8411 // Modify files in the working copy and perform git operations on other files.
8412 git_reset(0, &repo);
8413 git_remove_index(Path::new(B_TXT), &repo);
8414 git_stash(&mut repo);
8415 std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
8416 std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
8417 tree.flush_fs_events(cx).await;
8418 project
8419 .update(cx, |project, cx| project.git_scans_complete(cx))
8420 .await;
8421 cx.executor().run_until_parked();
8422
8423 // Check that more complex repo changes are tracked
8424 repository.read_with(cx, |repository, _cx| {
8425 assert_eq!(repository.status_for_path(&A_TXT.into()), None);
8426 assert_eq!(
8427 repository.status_for_path(&B_TXT.into()).unwrap().status,
8428 FileStatus::Untracked,
8429 );
8430 assert_eq!(
8431 repository.status_for_path(&E_TXT.into()).unwrap().status,
8432 StatusCode::Modified.worktree(),
8433 );
8434 });
8435
8436 std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
8437 std::fs::remove_dir_all(work_dir.join("c")).unwrap();
8438 std::fs::write(
8439 work_dir.join(DOTGITIGNORE),
8440 [IGNORE_RULE, "f.txt"].join("\n"),
8441 )
8442 .unwrap();
8443
8444 git_add(Path::new(DOTGITIGNORE), &repo);
8445 git_commit("Committing modified git ignore", &repo);
8446
8447 tree.flush_fs_events(cx).await;
8448 cx.executor().run_until_parked();
8449
8450 let mut renamed_dir_name = "first_directory/second_directory";
8451 const RENAMED_FILE: &str = "rf.txt";
8452
8453 std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
8454 std::fs::write(
8455 work_dir.join(renamed_dir_name).join(RENAMED_FILE),
8456 "new-contents",
8457 )
8458 .unwrap();
8459
8460 tree.flush_fs_events(cx).await;
8461 project
8462 .update(cx, |project, cx| project.git_scans_complete(cx))
8463 .await;
8464 cx.executor().run_until_parked();
8465
8466 repository.read_with(cx, |repository, _cx| {
8467 assert_eq!(
8468 repository
8469 .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
8470 .unwrap()
8471 .status,
8472 FileStatus::Untracked,
8473 );
8474 });
8475
8476 renamed_dir_name = "new_first_directory/second_directory";
8477
8478 std::fs::rename(
8479 work_dir.join("first_directory"),
8480 work_dir.join("new_first_directory"),
8481 )
8482 .unwrap();
8483
8484 tree.flush_fs_events(cx).await;
8485 project
8486 .update(cx, |project, cx| project.git_scans_complete(cx))
8487 .await;
8488 cx.executor().run_until_parked();
8489
8490 repository.read_with(cx, |repository, _cx| {
8491 assert_eq!(
8492 repository
8493 .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
8494 .unwrap()
8495 .status,
8496 FileStatus::Untracked,
8497 );
8498 });
8499}
8500
8501#[gpui::test]
8502async fn test_repos_in_invisible_worktrees(
8503 executor: BackgroundExecutor,
8504 cx: &mut gpui::TestAppContext,
8505) {
8506 init_test(cx);
8507 let fs = FakeFs::new(executor);
8508 fs.insert_tree(
8509 path!("/root"),
8510 json!({
8511 "dir1": {
8512 ".git": {},
8513 "dep1": {
8514 ".git": {},
8515 "src": {
8516 "a.txt": "",
8517 },
8518 },
8519 "b.txt": "",
8520 },
8521 }),
8522 )
8523 .await;
8524
8525 let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
8526 let _visible_worktree =
8527 project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8528 project
8529 .update(cx, |project, cx| project.git_scans_complete(cx))
8530 .await;
8531
8532 let repos = project.read_with(cx, |project, cx| {
8533 project
8534 .repositories(cx)
8535 .values()
8536 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
8537 .collect::<Vec<_>>()
8538 });
8539 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
8540
8541 let (_invisible_worktree, _) = project
8542 .update(cx, |project, cx| {
8543 project.worktree_store.update(cx, |worktree_store, cx| {
8544 worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
8545 })
8546 })
8547 .await
8548 .expect("failed to create worktree");
8549 project
8550 .update(cx, |project, cx| project.git_scans_complete(cx))
8551 .await;
8552
8553 let repos = project.read_with(cx, |project, cx| {
8554 project
8555 .repositories(cx)
8556 .values()
8557 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
8558 .collect::<Vec<_>>()
8559 });
8560 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
8561}
8562
8563#[gpui::test(iterations = 10)]
8564async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
8565 init_test(cx);
8566 cx.update(|cx| {
8567 cx.update_global::<SettingsStore, _>(|store, cx| {
8568 store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
8569 project_settings.file_scan_exclusions = Some(Vec::new());
8570 });
8571 });
8572 });
8573 let fs = FakeFs::new(cx.background_executor.clone());
8574 fs.insert_tree(
8575 path!("/root"),
8576 json!({
8577 ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
8578 "tree": {
8579 ".git": {},
8580 ".gitignore": "ignored-dir\n",
8581 "tracked-dir": {
8582 "tracked-file1": "",
8583 "ancestor-ignored-file1": "",
8584 },
8585 "ignored-dir": {
8586 "ignored-file1": ""
8587 }
8588 }
8589 }),
8590 )
8591 .await;
8592 fs.set_head_and_index_for_repo(
8593 path!("/root/tree/.git").as_ref(),
8594 &[
8595 (".gitignore".into(), "ignored-dir\n".into()),
8596 ("tracked-dir/tracked-file1".into(), "".into()),
8597 ],
8598 );
8599
8600 let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;
8601
8602 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8603 tree.flush_fs_events(cx).await;
8604 project
8605 .update(cx, |project, cx| project.git_scans_complete(cx))
8606 .await;
8607 cx.executor().run_until_parked();
8608
8609 let repository = project.read_with(cx, |project, cx| {
8610 project.repositories(cx).values().next().unwrap().clone()
8611 });
8612
8613 tree.read_with(cx, |tree, _| {
8614 tree.as_local()
8615 .unwrap()
8616 .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
8617 })
8618 .recv()
8619 .await;
8620
8621 cx.read(|cx| {
8622 assert_entry_git_state(
8623 tree.read(cx),
8624 repository.read(cx),
8625 "tracked-dir/tracked-file1",
8626 None,
8627 false,
8628 );
8629 assert_entry_git_state(
8630 tree.read(cx),
8631 repository.read(cx),
8632 "tracked-dir/ancestor-ignored-file1",
8633 None,
8634 false,
8635 );
8636 assert_entry_git_state(
8637 tree.read(cx),
8638 repository.read(cx),
8639 "ignored-dir/ignored-file1",
8640 None,
8641 true,
8642 );
8643 });
8644
8645 fs.create_file(
8646 path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
8647 Default::default(),
8648 )
8649 .await
8650 .unwrap();
8651 fs.set_index_for_repo(
8652 path!("/root/tree/.git").as_ref(),
8653 &[
8654 (".gitignore".into(), "ignored-dir\n".into()),
8655 ("tracked-dir/tracked-file1".into(), "".into()),
8656 ("tracked-dir/tracked-file2".into(), "".into()),
8657 ],
8658 );
8659 fs.create_file(
8660 path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
8661 Default::default(),
8662 )
8663 .await
8664 .unwrap();
8665 fs.create_file(
8666 path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
8667 Default::default(),
8668 )
8669 .await
8670 .unwrap();
8671
8672 cx.executor().run_until_parked();
8673 cx.read(|cx| {
8674 assert_entry_git_state(
8675 tree.read(cx),
8676 repository.read(cx),
8677 "tracked-dir/tracked-file2",
8678 Some(StatusCode::Added),
8679 false,
8680 );
8681 assert_entry_git_state(
8682 tree.read(cx),
8683 repository.read(cx),
8684 "tracked-dir/ancestor-ignored-file2",
8685 None,
8686 false,
8687 );
8688 assert_entry_git_state(
8689 tree.read(cx),
8690 repository.read(cx),
8691 "ignored-dir/ignored-file2",
8692 None,
8693 true,
8694 );
8695 assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
8696 });
8697}
8698
8699#[gpui::test]
8700async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
8701 init_test(cx);
8702
8703 let fs = FakeFs::new(cx.executor());
8704 fs.insert_tree(
8705 path!("/project"),
8706 json!({
8707 ".git": {
8708 "worktrees": {
8709 "some-worktree": {
8710 "commondir": "../..\n",
8711 // For is_git_dir
8712 "HEAD": "",
8713 "config": ""
8714 }
8715 },
8716 "modules": {
8717 "subdir": {
8718 "some-submodule": {
8719 // For is_git_dir
8720 "HEAD": "",
8721 "config": "",
8722 }
8723 }
8724 }
8725 },
8726 "src": {
8727 "a.txt": "A",
8728 },
8729 "some-worktree": {
8730 ".git": "gitdir: ../.git/worktrees/some-worktree\n",
8731 "src": {
8732 "b.txt": "B",
8733 }
8734 },
8735 "subdir": {
8736 "some-submodule": {
8737 ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
8738 "c.txt": "C",
8739 }
8740 }
8741 }),
8742 )
8743 .await;
8744
8745 let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
8746 let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
8747 scan_complete.await;
8748
8749 let mut repositories = project.update(cx, |project, cx| {
8750 project
8751 .repositories(cx)
8752 .values()
8753 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
8754 .collect::<Vec<_>>()
8755 });
8756 repositories.sort();
8757 pretty_assertions::assert_eq!(
8758 repositories,
8759 [
8760 Path::new(path!("/project")).into(),
8761 Path::new(path!("/project/some-worktree")).into(),
8762 Path::new(path!("/project/subdir/some-submodule")).into(),
8763 ]
8764 );
8765
8766 // Generate a git-related event for the worktree and check that it's refreshed.
8767 fs.with_git_state(
8768 path!("/project/some-worktree/.git").as_ref(),
8769 true,
8770 |state| {
8771 state
8772 .head_contents
8773 .insert("src/b.txt".into(), "b".to_owned());
8774 state
8775 .index_contents
8776 .insert("src/b.txt".into(), "b".to_owned());
8777 },
8778 )
8779 .unwrap();
8780 cx.run_until_parked();
8781
8782 let buffer = project
8783 .update(cx, |project, cx| {
8784 project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
8785 })
8786 .await
8787 .unwrap();
8788 let (worktree_repo, barrier) = project.update(cx, |project, cx| {
8789 let (repo, _) = project
8790 .git_store()
8791 .read(cx)
8792 .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
8793 .unwrap();
8794 pretty_assertions::assert_eq!(
8795 repo.read(cx).work_directory_abs_path,
8796 Path::new(path!("/project/some-worktree")).into(),
8797 );
8798 let barrier = repo.update(cx, |repo, _| repo.barrier());
8799 (repo.clone(), barrier)
8800 });
8801 barrier.await.unwrap();
8802 worktree_repo.update(cx, |repo, _| {
8803 pretty_assertions::assert_eq!(
8804 repo.status_for_path(&"src/b.txt".into()).unwrap().status,
8805 StatusCode::Modified.worktree(),
8806 );
8807 });
8808
8809 // The same for the submodule.
8810 fs.with_git_state(
8811 path!("/project/subdir/some-submodule/.git").as_ref(),
8812 true,
8813 |state| {
8814 state.head_contents.insert("c.txt".into(), "c".to_owned());
8815 state.index_contents.insert("c.txt".into(), "c".to_owned());
8816 },
8817 )
8818 .unwrap();
8819 cx.run_until_parked();
8820
8821 let buffer = project
8822 .update(cx, |project, cx| {
8823 project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
8824 })
8825 .await
8826 .unwrap();
8827 let (submodule_repo, barrier) = project.update(cx, |project, cx| {
8828 let (repo, _) = project
8829 .git_store()
8830 .read(cx)
8831 .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
8832 .unwrap();
8833 pretty_assertions::assert_eq!(
8834 repo.read(cx).work_directory_abs_path,
8835 Path::new(path!("/project/subdir/some-submodule")).into(),
8836 );
8837 let barrier = repo.update(cx, |repo, _| repo.barrier());
8838 (repo.clone(), barrier)
8839 });
8840 barrier.await.unwrap();
8841 submodule_repo.update(cx, |repo, _| {
8842 pretty_assertions::assert_eq!(
8843 repo.status_for_path(&"c.txt".into()).unwrap().status,
8844 StatusCode::Modified.worktree(),
8845 );
8846 });
8847}
8848
8849#[gpui::test]
8850async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
8851 init_test(cx);
8852 let fs = FakeFs::new(cx.background_executor.clone());
8853 fs.insert_tree(
8854 path!("/root"),
8855 json!({
8856 "project": {
8857 ".git": {},
8858 "child1": {
8859 "a.txt": "A",
8860 },
8861 "child2": {
8862 "b.txt": "B",
8863 }
8864 }
8865 }),
8866 )
8867 .await;
8868
8869 let project = Project::test(
8870 fs.clone(),
8871 [
8872 path!("/root/project/child1").as_ref(),
8873 path!("/root/project/child2").as_ref(),
8874 ],
8875 cx,
8876 )
8877 .await;
8878
8879 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8880 tree.flush_fs_events(cx).await;
8881 project
8882 .update(cx, |project, cx| project.git_scans_complete(cx))
8883 .await;
8884 cx.executor().run_until_parked();
8885
8886 let repos = project.read_with(cx, |project, cx| {
8887 project
8888 .repositories(cx)
8889 .values()
8890 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
8891 .collect::<Vec<_>>()
8892 });
8893 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
8894}
8895
8896async fn search(
8897 project: &Entity<Project>,
8898 query: SearchQuery,
8899 cx: &mut gpui::TestAppContext,
8900) -> Result<HashMap<String, Vec<Range<usize>>>> {
8901 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8902 let mut results = HashMap::default();
8903 while let Ok(search_result) = search_rx.recv().await {
8904 match search_result {
8905 SearchResult::Buffer { buffer, ranges } => {
8906 results.entry(buffer).or_insert(ranges);
8907 }
8908 SearchResult::LimitReached => {}
8909 }
8910 }
8911 Ok(results
8912 .into_iter()
8913 .map(|(buffer, ranges)| {
8914 buffer.update(cx, |buffer, cx| {
8915 let path = buffer
8916 .file()
8917 .unwrap()
8918 .full_path(cx)
8919 .to_string_lossy()
8920 .to_string();
8921 let ranges = ranges
8922 .into_iter()
8923 .map(|range| range.to_offset(buffer))
8924 .collect::<Vec<_>>();
8925 (path, ranges)
8926 })
8927 })
8928 .collect())
8929}
8930
8931pub fn init_test(cx: &mut gpui::TestAppContext) {
8932 zlog::init_test();
8933
8934 cx.update(|cx| {
8935 let settings_store = SettingsStore::test(cx);
8936 cx.set_global(settings_store);
8937 release_channel::init(SemanticVersion::default(), cx);
8938 language::init(cx);
8939 Project::init_settings(cx);
8940 });
8941}
8942
8943fn json_lang() -> Arc<Language> {
8944 Arc::new(Language::new(
8945 LanguageConfig {
8946 name: "JSON".into(),
8947 matcher: LanguageMatcher {
8948 path_suffixes: vec!["json".to_string()],
8949 ..Default::default()
8950 },
8951 ..Default::default()
8952 },
8953 None,
8954 ))
8955}
8956
8957fn js_lang() -> Arc<Language> {
8958 Arc::new(Language::new(
8959 LanguageConfig {
8960 name: "JavaScript".into(),
8961 matcher: LanguageMatcher {
8962 path_suffixes: vec!["js".to_string()],
8963 ..Default::default()
8964 },
8965 ..Default::default()
8966 },
8967 None,
8968 ))
8969}
8970
8971fn rust_lang() -> Arc<Language> {
8972 Arc::new(Language::new(
8973 LanguageConfig {
8974 name: "Rust".into(),
8975 matcher: LanguageMatcher {
8976 path_suffixes: vec!["rs".to_string()],
8977 ..Default::default()
8978 },
8979 ..Default::default()
8980 },
8981 Some(tree_sitter_rust::LANGUAGE.into()),
8982 ))
8983}
8984
8985fn typescript_lang() -> Arc<Language> {
8986 Arc::new(Language::new(
8987 LanguageConfig {
8988 name: "TypeScript".into(),
8989 matcher: LanguageMatcher {
8990 path_suffixes: vec!["ts".to_string()],
8991 ..Default::default()
8992 },
8993 ..Default::default()
8994 },
8995 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8996 ))
8997}
8998
8999fn tsx_lang() -> Arc<Language> {
9000 Arc::new(Language::new(
9001 LanguageConfig {
9002 name: "tsx".into(),
9003 matcher: LanguageMatcher {
9004 path_suffixes: vec!["tsx".to_string()],
9005 ..Default::default()
9006 },
9007 ..Default::default()
9008 },
9009 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9010 ))
9011}
9012
9013fn get_all_tasks(
9014 project: &Entity<Project>,
9015 task_contexts: Arc<TaskContexts>,
9016 cx: &mut App,
9017) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9018 let new_tasks = project.update(cx, |project, cx| {
9019 project.task_store.update(cx, |task_store, cx| {
9020 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9021 this.used_and_current_resolved_tasks(task_contexts, cx)
9022 })
9023 })
9024 });
9025
9026 cx.background_spawn(async move {
9027 let (mut old, new) = new_tasks.await;
9028 old.extend(new);
9029 old
9030 })
9031}
9032
9033#[track_caller]
9034fn assert_entry_git_state(
9035 tree: &Worktree,
9036 repository: &Repository,
9037 path: &str,
9038 index_status: Option<StatusCode>,
9039 is_ignored: bool,
9040) {
9041 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9042 let entry = tree
9043 .entry_for_path(path)
9044 .unwrap_or_else(|| panic!("entry {path} not found"));
9045 let status = repository
9046 .status_for_path(&path.into())
9047 .map(|entry| entry.status);
9048 let expected = index_status.map(|index_status| {
9049 TrackedStatus {
9050 index_status,
9051 worktree_status: StatusCode::Unmodified,
9052 }
9053 .into()
9054 });
9055 assert_eq!(
9056 status, expected,
9057 "expected {path} to have git status: {expected:?}"
9058 );
9059 assert_eq!(
9060 entry.is_ignored, is_ignored,
9061 "expected {path} to have is_ignored: {is_ignored}"
9062 );
9063}
9064
9065#[track_caller]
9066fn git_init(path: &Path) -> git2::Repository {
9067 let mut init_opts = RepositoryInitOptions::new();
9068 init_opts.initial_head("main");
9069 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9070}
9071
9072#[track_caller]
9073fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9074 let path = path.as_ref();
9075 let mut index = repo.index().expect("Failed to get index");
9076 index.add_path(path).expect("Failed to add file");
9077 index.write().expect("Failed to write index");
9078}
9079
9080#[track_caller]
9081fn git_remove_index(path: &Path, repo: &git2::Repository) {
9082 let mut index = repo.index().expect("Failed to get index");
9083 index.remove_path(path).expect("Failed to add file");
9084 index.write().expect("Failed to write index");
9085}
9086
9087#[track_caller]
9088fn git_commit(msg: &'static str, repo: &git2::Repository) {
9089 use git2::Signature;
9090
9091 let signature = Signature::now("test", "test@zed.dev").unwrap();
9092 let oid = repo.index().unwrap().write_tree().unwrap();
9093 let tree = repo.find_tree(oid).unwrap();
9094 if let Ok(head) = repo.head() {
9095 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9096
9097 let parent_commit = parent_obj.as_commit().unwrap();
9098
9099 repo.commit(
9100 Some("HEAD"),
9101 &signature,
9102 &signature,
9103 msg,
9104 &tree,
9105 &[parent_commit],
9106 )
9107 .expect("Failed to commit with parent");
9108 } else {
9109 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9110 .expect("Failed to commit");
9111 }
9112}
9113
9114#[cfg(any())]
9115#[track_caller]
9116fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
9117 repo.cherrypick(commit, None).expect("Failed to cherrypick");
9118}
9119
9120#[track_caller]
9121fn git_stash(repo: &mut git2::Repository) {
9122 use git2::Signature;
9123
9124 let signature = Signature::now("test", "test@zed.dev").unwrap();
9125 repo.stash_save(&signature, "N/A", None)
9126 .expect("Failed to stash");
9127}
9128
9129#[track_caller]
9130fn git_reset(offset: usize, repo: &git2::Repository) {
9131 let head = repo.head().expect("Couldn't get repo head");
9132 let object = head.peel(git2::ObjectType::Commit).unwrap();
9133 let commit = object.as_commit().unwrap();
9134 let new_head = commit
9135 .parents()
9136 .inspect(|parnet| {
9137 parnet.message();
9138 })
9139 .nth(offset)
9140 .expect("Not enough history");
9141 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9142 .expect("Could not reset");
9143}
9144
9145#[cfg(any())]
9146#[track_caller]
9147fn git_branch(name: &str, repo: &git2::Repository) {
9148 let head = repo
9149 .head()
9150 .expect("Couldn't get repo head")
9151 .peel_to_commit()
9152 .expect("HEAD is not a commit");
9153 repo.branch(name, &head, false).expect("Failed to commit");
9154}
9155
9156#[cfg(any())]
9157#[track_caller]
9158fn git_checkout(name: &str, repo: &git2::Repository) {
9159 repo.set_head(name).expect("Failed to set head");
9160 repo.checkout_head(None).expect("Failed to check out head");
9161}
9162
9163#[cfg(any())]
9164#[track_caller]
9165fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
9166 repo.statuses(None)
9167 .unwrap()
9168 .iter()
9169 .map(|status| (status.path().unwrap().to_string(), status.status()))
9170 .collect()
9171}
9172
9173#[gpui::test]
9174async fn test_find_project_path_abs(
9175 background_executor: BackgroundExecutor,
9176 cx: &mut gpui::TestAppContext,
9177) {
9178 // find_project_path should work with absolute paths
9179 init_test(cx);
9180
9181 let fs = FakeFs::new(background_executor);
9182 fs.insert_tree(
9183 path!("/root"),
9184 json!({
9185 "project1": {
9186 "file1.txt": "content1",
9187 "subdir": {
9188 "file2.txt": "content2"
9189 }
9190 },
9191 "project2": {
9192 "file3.txt": "content3"
9193 }
9194 }),
9195 )
9196 .await;
9197
9198 let project = Project::test(
9199 fs.clone(),
9200 [
9201 path!("/root/project1").as_ref(),
9202 path!("/root/project2").as_ref(),
9203 ],
9204 cx,
9205 )
9206 .await;
9207
9208 // Make sure the worktrees are fully initialized
9209 project
9210 .update(cx, |project, cx| project.git_scans_complete(cx))
9211 .await;
9212 cx.run_until_parked();
9213
9214 let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
9215 project.read_with(cx, |project, cx| {
9216 let worktrees: Vec<_> = project.worktrees(cx).collect();
9217 let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
9218 let id1 = worktrees[0].read(cx).id();
9219 let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
9220 let id2 = worktrees[1].read(cx).id();
9221 (abs_path1, id1, abs_path2, id2)
9222 });
9223
9224 project.update(cx, |project, cx| {
9225 let abs_path = project1_abs_path.join("file1.txt");
9226 let found_path = project.find_project_path(abs_path, cx).unwrap();
9227 assert_eq!(found_path.worktree_id, project1_id);
9228 assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));
9229
9230 let abs_path = project1_abs_path.join("subdir").join("file2.txt");
9231 let found_path = project.find_project_path(abs_path, cx).unwrap();
9232 assert_eq!(found_path.worktree_id, project1_id);
9233 assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));
9234
9235 let abs_path = project2_abs_path.join("file3.txt");
9236 let found_path = project.find_project_path(abs_path, cx).unwrap();
9237 assert_eq!(found_path.worktree_id, project2_id);
9238 assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));
9239
9240 let abs_path = project1_abs_path.join("nonexistent.txt");
9241 let found_path = project.find_project_path(abs_path, cx);
9242 assert!(
9243 found_path.is_some(),
9244 "Should find project path for nonexistent file in worktree"
9245 );
9246
9247 // Test with an absolute path outside any worktree
9248 let abs_path = Path::new("/some/other/path");
9249 let found_path = project.find_project_path(abs_path, cx);
9250 assert!(
9251 found_path.is_none(),
9252 "Should not find project path for path outside any worktree"
9253 );
9254 });
9255}