1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
9 DiffHunkStatusKind, assert_hunks,
10};
11use fs::FakeFs;
12use futures::{StreamExt, future};
13use git::{
14 GitHostingProviderRegistry,
15 repository::RepoPath,
16 status::{StatusCode, TrackedStatus},
17};
18use git2::RepositoryInitOptions;
19use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
20use http_client::Url;
21use language::{
22 Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
23 LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
24 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
25 tree_sitter_rust, tree_sitter_typescript,
26};
27use lsp::{
28 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
29 WillRenameFiles, notification::DidRenameFiles,
30};
31use parking_lot::Mutex;
32use paths::{config_dir, tasks_file};
33use postage::stream::Stream as _;
34use pretty_assertions::{assert_eq, assert_matches};
35use rand::{Rng as _, rngs::StdRng};
36use serde_json::json;
37#[cfg(not(windows))]
38use std::os;
39use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
40use task::{ResolvedTask, TaskContext};
41use unindent::Unindent as _;
42use util::{
43 TryFutureExt as _, assert_set_eq, maybe, path,
44 paths::PathMatcher,
45 separator,
46 test::{TempTree, marked_text_offsets},
47 uri,
48};
49use worktree::WorktreeModelHandle as _;
50
51#[gpui::test]
52async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
53 cx.executor().allow_parking();
54
55 let (tx, mut rx) = futures::channel::mpsc::unbounded();
56 let _thread = std::thread::spawn(move || {
57 #[cfg(not(target_os = "windows"))]
58 std::fs::metadata("/tmp").unwrap();
59 #[cfg(target_os = "windows")]
60 std::fs::metadata("C:/Windows").unwrap();
61 std::thread::sleep(Duration::from_millis(1000));
62 tx.unbounded_send(1).unwrap();
63 });
64 rx.next().await.unwrap();
65}
66
67#[gpui::test]
68async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
69 cx.executor().allow_parking();
70
71 let io_task = smol::unblock(move || {
72 println!("sleeping on thread {:?}", std::thread::current().id());
73 std::thread::sleep(Duration::from_millis(10));
74 1
75 });
76
77 let task = cx.foreground_executor().spawn(async move {
78 io_task.await;
79 });
80
81 task.await;
82}
83
84#[cfg(not(windows))]
85#[gpui::test]
86async fn test_symlinks(cx: &mut gpui::TestAppContext) {
87 init_test(cx);
88 cx.executor().allow_parking();
89
90 let dir = TempTree::new(json!({
91 "root": {
92 "apple": "",
93 "banana": {
94 "carrot": {
95 "date": "",
96 "endive": "",
97 }
98 },
99 "fennel": {
100 "grape": "",
101 }
102 }
103 }));
104
105 let root_link_path = dir.path().join("root_link");
106 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
107 os::unix::fs::symlink(
108 dir.path().join("root/fennel"),
109 dir.path().join("root/finnochio"),
110 )
111 .unwrap();
112
113 let project = Project::test(
114 Arc::new(RealFs::new(None, cx.executor())),
115 [root_link_path.as_ref()],
116 cx,
117 )
118 .await;
119
120 project.update(cx, |project, cx| {
121 let tree = project.worktrees(cx).next().unwrap().read(cx);
122 assert_eq!(tree.file_count(), 5);
123 assert_eq!(
124 tree.inode_for_path("fennel/grape"),
125 tree.inode_for_path("finnochio/grape")
126 );
127 });
128}
129
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies EditorConfig handling: `.editorconfig` values override
    // `.zed/settings.json`, a nested `.editorconfig` overrides its parent,
    // `tab_width` is used when `indent_size` is absent, and files that match
    // no EditorConfig glob keep the Zed settings.
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
            indent_style = tab
            indent_size = 3
            end_of_line = lf
            insert_final_newline = true
            trim_trailing_whitespace = true
        [*.js]
            tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
                indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the project watches it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let settings and worktree scanning settle before querying.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a file in the worktree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            // Language lookup is async; block on it from the test context.
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so it keeps the tab_size of 8 from .zed/settings.json.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
219
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    // A custom git hosting provider declared in the project's
    // `.zed/settings.json` should be registered in the global provider
    // registry, and unregistered again when the setting is removed.
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    // Wait for the settings file to be observed and applied.
    cx.executor().run_until_parked();

    // The provider named "foo" from the settings file is now registered.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Replace the settings with an empty object, deleting the provider
    // configuration.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    // The custom provider must have been removed from the registry.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
284
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies that per-directory `.zed/settings.json` and `.zed/tasks.json`
    // are honored (nested settings override the root), and that global
    // file-based tasks are merged into the task inventory alongside
    // worktree-local ones.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the worktree's default (empty) context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // Source kind for tasks defined in the worktree root's `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings check: a/ inherits tab_size 8 from the root settings,
            // while b/ is overridden to 2 by its own `.zed/settings.json`.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both tasks are present: the nested b/.zed task is listed before the
    // root .zed task. On Windows the id base quotes backslash separators.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root `.zed` task as scheduled and add a global (file-based)
    // task via the user's tasks.json.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The scheduled "cargo check all" task now comes first, and the new
    // global task (with its env var) appears last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
489
490#[gpui::test]
491async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
492 init_test(cx);
493 TaskStore::init(None);
494
495 let fs = FakeFs::new(cx.executor());
496 fs.insert_tree(
497 path!("/dir"),
498 json!({
499 ".zed": {
500 "tasks.json": r#"[{
501 "label": "test worktree root",
502 "command": "echo $ZED_WORKTREE_ROOT"
503 }]"#,
504 },
505 "a": {
506 "a.rs": "fn a() {\n A\n}"
507 },
508 }),
509 )
510 .await;
511
512 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
513 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
514
515 cx.executor().run_until_parked();
516 let worktree_id = cx.update(|cx| {
517 project.update(cx, |project, cx| {
518 project.worktrees(cx).next().unwrap().read(cx).id()
519 })
520 });
521
522 let active_non_worktree_item_tasks = cx.update(|cx| {
523 get_all_tasks(
524 &project,
525 &TaskContexts {
526 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
527 active_worktree_context: None,
528 other_worktree_contexts: Vec::new(),
529 lsp_task_sources: HashMap::default(),
530 latest_selection: None,
531 },
532 cx,
533 )
534 });
535 assert!(
536 active_non_worktree_item_tasks.is_empty(),
537 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
538 );
539
540 let active_worktree_tasks = cx.update(|cx| {
541 get_all_tasks(
542 &project,
543 &TaskContexts {
544 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
545 active_worktree_context: Some((worktree_id, {
546 let mut worktree_context = TaskContext::default();
547 worktree_context
548 .task_variables
549 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
550 worktree_context
551 })),
552 other_worktree_contexts: Vec::new(),
553 lsp_task_sources: HashMap::default(),
554 latest_selection: None,
555 },
556 cx,
557 )
558 });
559 assert_eq!(
560 active_worktree_tasks
561 .into_iter()
562 .map(|(source_kind, task)| {
563 let resolved = task.resolved;
564 (source_kind, resolved.command)
565 })
566 .collect::<Vec<_>>(),
567 vec![(
568 TaskSourceKind::Worktree {
569 id: worktree_id,
570 directory_in_worktree: PathBuf::from(separator!(".zed")),
571 id_base: if cfg!(windows) {
572 "local worktree tasks from directory \".zed\"".into()
573 } else {
574 "local worktree tasks from directory \".zed\"".into()
575 },
576 },
577 "echo /dir".to_string(),
578 )]
579 );
580}
581
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end check of language server lifecycle management: servers are
    // started lazily per language, buffers are opened/closed/renamed across
    // the right servers, capabilities configure buffers, and restarting
    // servers reopens the relevant documents. The notification ordering
    // asserted below is part of the contract under test.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server: completion triggers "." and "::", save notifications on.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server: completion trigger ":", save notifications on.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // The old document is closed and the renamed one opened on the same server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it gets cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the restart.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two DidOpen notifications is not fixed, hence the set
    // comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
983
984#[gpui::test]
985async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
986 init_test(cx);
987
988 let fs = FakeFs::new(cx.executor());
989 fs.insert_tree(
990 path!("/the-root"),
991 json!({
992 ".gitignore": "target\n",
993 "Cargo.lock": "",
994 "src": {
995 "a.rs": "",
996 "b.rs": "",
997 },
998 "target": {
999 "x": {
1000 "out": {
1001 "x.rs": ""
1002 }
1003 },
1004 "y": {
1005 "out": {
1006 "y.rs": "",
1007 }
1008 },
1009 "z": {
1010 "out": {
1011 "z.rs": ""
1012 }
1013 }
1014 }
1015 }),
1016 )
1017 .await;
1018 fs.insert_tree(
1019 path!("/the-registry"),
1020 json!({
1021 "dep1": {
1022 "src": {
1023 "dep1.rs": "",
1024 }
1025 },
1026 "dep2": {
1027 "src": {
1028 "dep2.rs": "",
1029 }
1030 },
1031 }),
1032 )
1033 .await;
1034 fs.insert_tree(
1035 path!("/the/stdlib"),
1036 json!({
1037 "LICENSE": "",
1038 "src": {
1039 "string.rs": "",
1040 }
1041 }),
1042 )
1043 .await;
1044
1045 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1046 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1047 (project.languages().clone(), project.lsp_store())
1048 });
1049 language_registry.add(rust_lang());
1050 let mut fake_servers = language_registry.register_fake_lsp(
1051 "Rust",
1052 FakeLspAdapter {
1053 name: "the-language-server",
1054 ..Default::default()
1055 },
1056 );
1057
1058 cx.executor().run_until_parked();
1059
1060 // Start the language server by opening a buffer with a compatible file extension.
1061 project
1062 .update(cx, |project, cx| {
1063 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1064 })
1065 .await
1066 .unwrap();
1067
1068 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1069 project.update(cx, |project, cx| {
1070 let worktree = project.worktrees(cx).next().unwrap();
1071 assert_eq!(
1072 worktree
1073 .read(cx)
1074 .snapshot()
1075 .entries(true, 0)
1076 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1077 .collect::<Vec<_>>(),
1078 &[
1079 (Path::new(""), false),
1080 (Path::new(".gitignore"), false),
1081 (Path::new("Cargo.lock"), false),
1082 (Path::new("src"), false),
1083 (Path::new("src/a.rs"), false),
1084 (Path::new("src/b.rs"), false),
1085 (Path::new("target"), true),
1086 ]
1087 );
1088 });
1089
1090 let prev_read_dir_count = fs.read_dir_call_count();
1091
1092 let fake_server = fake_servers.next().await.unwrap();
1093 let (server_id, server_name) = lsp_store.read_with(cx, |lsp_store, _| {
1094 let (id, status) = lsp_store.language_server_statuses().next().unwrap();
1095 (id, LanguageServerName::from(status.name.as_str()))
1096 });
1097
1098 // Simulate jumping to a definition in a dependency outside of the worktree.
1099 let _out_of_worktree_buffer = project
1100 .update(cx, |project, cx| {
1101 project.open_local_buffer_via_lsp(
1102 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1103 server_id,
1104 server_name.clone(),
1105 cx,
1106 )
1107 })
1108 .await
1109 .unwrap();
1110
1111 // Keep track of the FS events reported to the language server.
1112 let file_changes = Arc::new(Mutex::new(Vec::new()));
1113 fake_server
1114 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1115 registrations: vec![lsp::Registration {
1116 id: Default::default(),
1117 method: "workspace/didChangeWatchedFiles".to_string(),
1118 register_options: serde_json::to_value(
1119 lsp::DidChangeWatchedFilesRegistrationOptions {
1120 watchers: vec![
1121 lsp::FileSystemWatcher {
1122 glob_pattern: lsp::GlobPattern::String(
1123 path!("/the-root/Cargo.toml").to_string(),
1124 ),
1125 kind: None,
1126 },
1127 lsp::FileSystemWatcher {
1128 glob_pattern: lsp::GlobPattern::String(
1129 path!("/the-root/src/*.{rs,c}").to_string(),
1130 ),
1131 kind: None,
1132 },
1133 lsp::FileSystemWatcher {
1134 glob_pattern: lsp::GlobPattern::String(
1135 path!("/the-root/target/y/**/*.rs").to_string(),
1136 ),
1137 kind: None,
1138 },
1139 lsp::FileSystemWatcher {
1140 glob_pattern: lsp::GlobPattern::String(
1141 path!("/the/stdlib/src/**/*.rs").to_string(),
1142 ),
1143 kind: None,
1144 },
1145 lsp::FileSystemWatcher {
1146 glob_pattern: lsp::GlobPattern::String(
1147 path!("**/Cargo.lock").to_string(),
1148 ),
1149 kind: None,
1150 },
1151 ],
1152 },
1153 )
1154 .ok(),
1155 }],
1156 })
1157 .await
1158 .into_response()
1159 .unwrap();
1160 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1161 let file_changes = file_changes.clone();
1162 move |params, _| {
1163 let mut file_changes = file_changes.lock();
1164 file_changes.extend(params.changes);
1165 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1166 }
1167 });
1168
1169 cx.executor().run_until_parked();
1170 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1171 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1172
1173 let mut new_watched_paths = fs.watched_paths();
1174 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1175 assert_eq!(
1176 &new_watched_paths,
1177 &[
1178 Path::new(path!("/the-root")),
1179 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1180 Path::new(path!("/the/stdlib/src"))
1181 ]
1182 );
1183
1184 // Now the language server has asked us to watch an ignored directory path,
1185 // so we recursively load it.
1186 project.update(cx, |project, cx| {
1187 let worktree = project.visible_worktrees(cx).next().unwrap();
1188 assert_eq!(
1189 worktree
1190 .read(cx)
1191 .snapshot()
1192 .entries(true, 0)
1193 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1194 .collect::<Vec<_>>(),
1195 &[
1196 (Path::new(""), false),
1197 (Path::new(".gitignore"), false),
1198 (Path::new("Cargo.lock"), false),
1199 (Path::new("src"), false),
1200 (Path::new("src/a.rs"), false),
1201 (Path::new("src/b.rs"), false),
1202 (Path::new("target"), true),
1203 (Path::new("target/x"), true),
1204 (Path::new("target/y"), true),
1205 (Path::new("target/y/out"), true),
1206 (Path::new("target/y/out/y.rs"), true),
1207 (Path::new("target/z"), true),
1208 ]
1209 );
1210 });
1211
1212 // Perform some file system mutations, two of which match the watched patterns,
1213 // and one of which does not.
1214 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1215 .await
1216 .unwrap();
1217 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1218 .await
1219 .unwrap();
1220 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1221 .await
1222 .unwrap();
1223 fs.create_file(
1224 path!("/the-root/target/x/out/x2.rs").as_ref(),
1225 Default::default(),
1226 )
1227 .await
1228 .unwrap();
1229 fs.create_file(
1230 path!("/the-root/target/y/out/y2.rs").as_ref(),
1231 Default::default(),
1232 )
1233 .await
1234 .unwrap();
1235 fs.save(
1236 path!("/the-root/Cargo.lock").as_ref(),
1237 &"".into(),
1238 Default::default(),
1239 )
1240 .await
1241 .unwrap();
1242 fs.save(
1243 path!("/the-stdlib/LICENSE").as_ref(),
1244 &"".into(),
1245 Default::default(),
1246 )
1247 .await
1248 .unwrap();
1249 fs.save(
1250 path!("/the/stdlib/src/string.rs").as_ref(),
1251 &"".into(),
1252 Default::default(),
1253 )
1254 .await
1255 .unwrap();
1256
1257 // The language server receives events for the FS mutations that match its watch patterns.
1258 cx.executor().run_until_parked();
1259 assert_eq!(
1260 &*file_changes.lock(),
1261 &[
1262 lsp::FileEvent {
1263 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1264 typ: lsp::FileChangeType::CHANGED,
1265 },
1266 lsp::FileEvent {
1267 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1268 typ: lsp::FileChangeType::DELETED,
1269 },
1270 lsp::FileEvent {
1271 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1272 typ: lsp::FileChangeType::CREATED,
1273 },
1274 lsp::FileEvent {
1275 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1276 typ: lsp::FileChangeType::CREATED,
1277 },
1278 lsp::FileEvent {
1279 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1280 typ: lsp::FileChangeType::CHANGED,
1281 },
1282 ]
1283 );
1284}
1285
1286#[gpui::test]
1287async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1288 init_test(cx);
1289
1290 let fs = FakeFs::new(cx.executor());
1291 fs.insert_tree(
1292 path!("/dir"),
1293 json!({
1294 "a.rs": "let a = 1;",
1295 "b.rs": "let b = 2;"
1296 }),
1297 )
1298 .await;
1299
1300 let project = Project::test(
1301 fs,
1302 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1303 cx,
1304 )
1305 .await;
1306 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1307
1308 let buffer_a = project
1309 .update(cx, |project, cx| {
1310 project.open_local_buffer(path!("/dir/a.rs"), cx)
1311 })
1312 .await
1313 .unwrap();
1314 let buffer_b = project
1315 .update(cx, |project, cx| {
1316 project.open_local_buffer(path!("/dir/b.rs"), cx)
1317 })
1318 .await
1319 .unwrap();
1320
1321 lsp_store.update(cx, |lsp_store, cx| {
1322 lsp_store
1323 .update_diagnostics(
1324 LanguageServerId(0),
1325 lsp::PublishDiagnosticsParams {
1326 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1327 version: None,
1328 diagnostics: vec![lsp::Diagnostic {
1329 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1330 severity: Some(lsp::DiagnosticSeverity::ERROR),
1331 message: "error 1".to_string(),
1332 ..Default::default()
1333 }],
1334 },
1335 None,
1336 DiagnosticSourceKind::Pushed,
1337 &[],
1338 cx,
1339 )
1340 .unwrap();
1341 lsp_store
1342 .update_diagnostics(
1343 LanguageServerId(0),
1344 lsp::PublishDiagnosticsParams {
1345 uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
1346 version: None,
1347 diagnostics: vec![lsp::Diagnostic {
1348 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1349 severity: Some(DiagnosticSeverity::WARNING),
1350 message: "error 2".to_string(),
1351 ..Default::default()
1352 }],
1353 },
1354 None,
1355 DiagnosticSourceKind::Pushed,
1356 &[],
1357 cx,
1358 )
1359 .unwrap();
1360 });
1361
1362 buffer_a.update(cx, |buffer, _| {
1363 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1364 assert_eq!(
1365 chunks
1366 .iter()
1367 .map(|(s, d)| (s.as_str(), *d))
1368 .collect::<Vec<_>>(),
1369 &[
1370 ("let ", None),
1371 ("a", Some(DiagnosticSeverity::ERROR)),
1372 (" = 1;", None),
1373 ]
1374 );
1375 });
1376 buffer_b.update(cx, |buffer, _| {
1377 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1378 assert_eq!(
1379 chunks
1380 .iter()
1381 .map(|(s, d)| (s.as_str(), *d))
1382 .collect::<Vec<_>>(),
1383 &[
1384 ("let ", None),
1385 ("b", Some(DiagnosticSeverity::WARNING)),
1386 (" = 2;", None),
1387 ]
1388 );
1389 });
1390}
1391
1392#[gpui::test]
1393async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1394 init_test(cx);
1395
1396 let fs = FakeFs::new(cx.executor());
1397 fs.insert_tree(
1398 path!("/root"),
1399 json!({
1400 "dir": {
1401 ".git": {
1402 "HEAD": "ref: refs/heads/main",
1403 },
1404 ".gitignore": "b.rs",
1405 "a.rs": "let a = 1;",
1406 "b.rs": "let b = 2;",
1407 },
1408 "other.rs": "let b = c;"
1409 }),
1410 )
1411 .await;
1412
1413 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1414 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1415 let (worktree, _) = project
1416 .update(cx, |project, cx| {
1417 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1418 })
1419 .await
1420 .unwrap();
1421 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1422
1423 let (worktree, _) = project
1424 .update(cx, |project, cx| {
1425 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1426 })
1427 .await
1428 .unwrap();
1429 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1430
1431 let server_id = LanguageServerId(0);
1432 lsp_store.update(cx, |lsp_store, cx| {
1433 lsp_store
1434 .update_diagnostics(
1435 server_id,
1436 lsp::PublishDiagnosticsParams {
1437 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1438 version: None,
1439 diagnostics: vec![lsp::Diagnostic {
1440 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1441 severity: Some(lsp::DiagnosticSeverity::ERROR),
1442 message: "unused variable 'b'".to_string(),
1443 ..Default::default()
1444 }],
1445 },
1446 None,
1447 DiagnosticSourceKind::Pushed,
1448 &[],
1449 cx,
1450 )
1451 .unwrap();
1452 lsp_store
1453 .update_diagnostics(
1454 server_id,
1455 lsp::PublishDiagnosticsParams {
1456 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1457 version: None,
1458 diagnostics: vec![lsp::Diagnostic {
1459 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1460 severity: Some(lsp::DiagnosticSeverity::ERROR),
1461 message: "unknown variable 'c'".to_string(),
1462 ..Default::default()
1463 }],
1464 },
1465 None,
1466 DiagnosticSourceKind::Pushed,
1467 &[],
1468 cx,
1469 )
1470 .unwrap();
1471 });
1472
1473 let main_ignored_buffer = project
1474 .update(cx, |project, cx| {
1475 project.open_buffer((main_worktree_id, "b.rs"), cx)
1476 })
1477 .await
1478 .unwrap();
1479 main_ignored_buffer.update(cx, |buffer, _| {
1480 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1481 assert_eq!(
1482 chunks
1483 .iter()
1484 .map(|(s, d)| (s.as_str(), *d))
1485 .collect::<Vec<_>>(),
1486 &[
1487 ("let ", None),
1488 ("b", Some(DiagnosticSeverity::ERROR)),
1489 (" = 2;", None),
1490 ],
1491 "Gigitnored buffers should still get in-buffer diagnostics",
1492 );
1493 });
1494 let other_buffer = project
1495 .update(cx, |project, cx| {
1496 project.open_buffer((other_worktree_id, ""), cx)
1497 })
1498 .await
1499 .unwrap();
1500 other_buffer.update(cx, |buffer, _| {
1501 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1502 assert_eq!(
1503 chunks
1504 .iter()
1505 .map(|(s, d)| (s.as_str(), *d))
1506 .collect::<Vec<_>>(),
1507 &[
1508 ("let b = ", None),
1509 ("c", Some(DiagnosticSeverity::ERROR)),
1510 (";", None),
1511 ],
1512 "Buffers from hidden projects should still get in-buffer diagnostics"
1513 );
1514 });
1515
1516 project.update(cx, |project, cx| {
1517 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1518 assert_eq!(
1519 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1520 vec![(
1521 ProjectPath {
1522 worktree_id: main_worktree_id,
1523 path: Arc::from(Path::new("b.rs")),
1524 },
1525 server_id,
1526 DiagnosticSummary {
1527 error_count: 1,
1528 warning_count: 0,
1529 }
1530 )]
1531 );
1532 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1533 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1534 });
1535}
1536
// Verifies the project event stream around disk-based diagnostics: the
// server's progress token drives DiskBasedDiagnosticsStarted/Finished events,
// published diagnostics surface as DiagnosticsUpdated, and a repeated empty
// publish produces no redundant event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Register a fake server whose progress token marks disk-based diagnostics.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Subscribe to project events before driving the server so none are missed.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning work under the disk-based progress token should emit a
    // DiskBasedDiagnosticsStarted event (after the inlay-hint refresh).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics mid-progress surfaces a DiagnosticsUpdated event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress token finishes the disk-based diagnostics cycle.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is present in the buffer with the expected range.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // A second identical empty publish must not yield another event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1673
// Restarting a language server while its disk-based diagnostics are still in
// progress must not leave the project stuck "running diagnostics": the new
// server's progress cycle alone determines the started/finished events.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Note: the replacement server gets a fresh id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server (id 1) is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1760
1761#[gpui::test]
1762async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1763 init_test(cx);
1764
1765 let fs = FakeFs::new(cx.executor());
1766 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
1767
1768 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1769
1770 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1771 language_registry.add(rust_lang());
1772 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1773
1774 let (buffer, _) = project
1775 .update(cx, |project, cx| {
1776 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1777 })
1778 .await
1779 .unwrap();
1780
1781 // Publish diagnostics
1782 let fake_server = fake_servers.next().await.unwrap();
1783 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1784 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1785 version: None,
1786 diagnostics: vec![lsp::Diagnostic {
1787 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1788 severity: Some(lsp::DiagnosticSeverity::ERROR),
1789 message: "the message".to_string(),
1790 ..Default::default()
1791 }],
1792 });
1793
1794 cx.executor().run_until_parked();
1795 buffer.update(cx, |buffer, _| {
1796 assert_eq!(
1797 buffer
1798 .snapshot()
1799 .diagnostics_in_range::<_, usize>(0..1, false)
1800 .map(|entry| entry.diagnostic.message.clone())
1801 .collect::<Vec<_>>(),
1802 ["the message".to_string()]
1803 );
1804 });
1805 project.update(cx, |project, cx| {
1806 assert_eq!(
1807 project.diagnostic_summary(false, cx),
1808 DiagnosticSummary {
1809 error_count: 1,
1810 warning_count: 0,
1811 }
1812 );
1813 });
1814
1815 project.update(cx, |project, cx| {
1816 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1817 });
1818
1819 // The diagnostics are cleared.
1820 cx.executor().run_until_parked();
1821 buffer.update(cx, |buffer, _| {
1822 assert_eq!(
1823 buffer
1824 .snapshot()
1825 .diagnostics_in_range::<_, usize>(0..1, false)
1826 .map(|entry| entry.diagnostic.message.clone())
1827 .collect::<Vec<_>>(),
1828 Vec::<String>::new(),
1829 );
1830 });
1831 project.update(cx, |project, cx| {
1832 assert_eq!(
1833 project.diagnostic_summary(false, cx),
1834 DiagnosticSummary {
1835 error_count: 0,
1836 warning_count: 0,
1837 }
1838 );
1839 });
1840}
1841
1842#[gpui::test]
1843async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1844 init_test(cx);
1845
1846 let fs = FakeFs::new(cx.executor());
1847 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1848
1849 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1850 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1851
1852 language_registry.add(rust_lang());
1853 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1854
1855 let (buffer, _handle) = project
1856 .update(cx, |project, cx| {
1857 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1858 })
1859 .await
1860 .unwrap();
1861
1862 // Before restarting the server, report diagnostics with an unknown buffer version.
1863 let fake_server = fake_servers.next().await.unwrap();
1864 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1865 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1866 version: Some(10000),
1867 diagnostics: Vec::new(),
1868 });
1869 cx.executor().run_until_parked();
1870 project.update(cx, |project, cx| {
1871 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1872 });
1873
1874 let mut fake_server = fake_servers.next().await.unwrap();
1875 let notification = fake_server
1876 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1877 .await
1878 .text_document;
1879 assert_eq!(notification.version, 0);
1880}
1881
// Cancelling language-server work for a buffer should send a
// WorkDoneProgressCancel only for the token the server marked cancellable,
// not for the non-cancellable one.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    // Start two progress tokens: the first explicitly non-cancellable,
    // the second cancellable.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1946
// Toggling the `enable_language_server` setting per language should stop only
// that language's server, and restart it (re-opening its buffers) when the
// setting is flipped back on — without affecting other languages' servers.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // One fake server per language so each can be observed independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server is told to exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A brand-new Rust server instance re-opens the Rust buffer…
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // …while the JS server is shut down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2064
2065#[gpui::test(iterations = 3)]
2066async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
2067 init_test(cx);
2068
2069 let text = "
2070 fn a() { A }
2071 fn b() { BB }
2072 fn c() { CCC }
2073 "
2074 .unindent();
2075
2076 let fs = FakeFs::new(cx.executor());
2077 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2078
2079 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2080 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2081
2082 language_registry.add(rust_lang());
2083 let mut fake_servers = language_registry.register_fake_lsp(
2084 "Rust",
2085 FakeLspAdapter {
2086 disk_based_diagnostics_sources: vec!["disk".into()],
2087 ..Default::default()
2088 },
2089 );
2090
2091 let buffer = project
2092 .update(cx, |project, cx| {
2093 project.open_local_buffer(path!("/dir/a.rs"), cx)
2094 })
2095 .await
2096 .unwrap();
2097
2098 let _handle = project.update(cx, |project, cx| {
2099 project.register_buffer_with_language_servers(&buffer, cx)
2100 });
2101
2102 let mut fake_server = fake_servers.next().await.unwrap();
2103 let open_notification = fake_server
2104 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2105 .await;
2106
2107 // Edit the buffer, moving the content down
2108 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2109 let change_notification_1 = fake_server
2110 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2111 .await;
2112 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2113
2114 // Report some diagnostics for the initial version of the buffer
2115 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2116 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2117 version: Some(open_notification.text_document.version),
2118 diagnostics: vec![
2119 lsp::Diagnostic {
2120 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2121 severity: Some(DiagnosticSeverity::ERROR),
2122 message: "undefined variable 'A'".to_string(),
2123 source: Some("disk".to_string()),
2124 ..Default::default()
2125 },
2126 lsp::Diagnostic {
2127 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2128 severity: Some(DiagnosticSeverity::ERROR),
2129 message: "undefined variable 'BB'".to_string(),
2130 source: Some("disk".to_string()),
2131 ..Default::default()
2132 },
2133 lsp::Diagnostic {
2134 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2135 severity: Some(DiagnosticSeverity::ERROR),
2136 source: Some("disk".to_string()),
2137 message: "undefined variable 'CCC'".to_string(),
2138 ..Default::default()
2139 },
2140 ],
2141 });
2142
2143 // The diagnostics have moved down since they were created.
2144 cx.executor().run_until_parked();
2145 buffer.update(cx, |buffer, _| {
2146 assert_eq!(
2147 buffer
2148 .snapshot()
2149 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2150 .collect::<Vec<_>>(),
2151 &[
2152 DiagnosticEntry {
2153 range: Point::new(3, 9)..Point::new(3, 11),
2154 diagnostic: Diagnostic {
2155 source: Some("disk".into()),
2156 severity: DiagnosticSeverity::ERROR,
2157 message: "undefined variable 'BB'".to_string(),
2158 is_disk_based: true,
2159 group_id: 1,
2160 is_primary: true,
2161 source_kind: DiagnosticSourceKind::Pushed,
2162 ..Diagnostic::default()
2163 },
2164 },
2165 DiagnosticEntry {
2166 range: Point::new(4, 9)..Point::new(4, 12),
2167 diagnostic: Diagnostic {
2168 source: Some("disk".into()),
2169 severity: DiagnosticSeverity::ERROR,
2170 message: "undefined variable 'CCC'".to_string(),
2171 is_disk_based: true,
2172 group_id: 2,
2173 is_primary: true,
2174 source_kind: DiagnosticSourceKind::Pushed,
2175 ..Diagnostic::default()
2176 }
2177 }
2178 ]
2179 );
2180 assert_eq!(
2181 chunks_with_diagnostics(buffer, 0..buffer.len()),
2182 [
2183 ("\n\nfn a() { ".to_string(), None),
2184 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2185 (" }\nfn b() { ".to_string(), None),
2186 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2187 (" }\nfn c() { ".to_string(), None),
2188 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2189 (" }\n".to_string(), None),
2190 ]
2191 );
2192 assert_eq!(
2193 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2194 [
2195 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2196 (" }\nfn c() { ".to_string(), None),
2197 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2198 ]
2199 );
2200 });
2201
2202 // Ensure overlapping diagnostics are highlighted correctly.
2203 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2204 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2205 version: Some(open_notification.text_document.version),
2206 diagnostics: vec![
2207 lsp::Diagnostic {
2208 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2209 severity: Some(DiagnosticSeverity::ERROR),
2210 message: "undefined variable 'A'".to_string(),
2211 source: Some("disk".to_string()),
2212 ..Default::default()
2213 },
2214 lsp::Diagnostic {
2215 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2216 severity: Some(DiagnosticSeverity::WARNING),
2217 message: "unreachable statement".to_string(),
2218 source: Some("disk".to_string()),
2219 ..Default::default()
2220 },
2221 ],
2222 });
2223
2224 cx.executor().run_until_parked();
2225 buffer.update(cx, |buffer, _| {
2226 assert_eq!(
2227 buffer
2228 .snapshot()
2229 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2230 .collect::<Vec<_>>(),
2231 &[
2232 DiagnosticEntry {
2233 range: Point::new(2, 9)..Point::new(2, 12),
2234 diagnostic: Diagnostic {
2235 source: Some("disk".into()),
2236 severity: DiagnosticSeverity::WARNING,
2237 message: "unreachable statement".to_string(),
2238 is_disk_based: true,
2239 group_id: 4,
2240 is_primary: true,
2241 source_kind: DiagnosticSourceKind::Pushed,
2242 ..Diagnostic::default()
2243 }
2244 },
2245 DiagnosticEntry {
2246 range: Point::new(2, 9)..Point::new(2, 10),
2247 diagnostic: Diagnostic {
2248 source: Some("disk".into()),
2249 severity: DiagnosticSeverity::ERROR,
2250 message: "undefined variable 'A'".to_string(),
2251 is_disk_based: true,
2252 group_id: 3,
2253 is_primary: true,
2254 source_kind: DiagnosticSourceKind::Pushed,
2255 ..Diagnostic::default()
2256 },
2257 }
2258 ]
2259 );
2260 assert_eq!(
2261 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2262 [
2263 ("fn a() { ".to_string(), None),
2264 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2265 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2266 ("\n".to_string(), None),
2267 ]
2268 );
2269 assert_eq!(
2270 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2271 [
2272 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2273 ("\n".to_string(), None),
2274 ]
2275 );
2276 });
2277
2278 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2279 // changes since the last save.
2280 buffer.update(cx, |buffer, cx| {
2281 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2282 buffer.edit(
2283 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2284 None,
2285 cx,
2286 );
2287 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2288 });
2289 let change_notification_2 = fake_server
2290 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2291 .await;
2292 assert!(
2293 change_notification_2.text_document.version > change_notification_1.text_document.version
2294 );
2295
2296 // Handle out-of-order diagnostics
2297 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2298 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2299 version: Some(change_notification_2.text_document.version),
2300 diagnostics: vec![
2301 lsp::Diagnostic {
2302 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2303 severity: Some(DiagnosticSeverity::ERROR),
2304 message: "undefined variable 'BB'".to_string(),
2305 source: Some("disk".to_string()),
2306 ..Default::default()
2307 },
2308 lsp::Diagnostic {
2309 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2310 severity: Some(DiagnosticSeverity::WARNING),
2311 message: "undefined variable 'A'".to_string(),
2312 source: Some("disk".to_string()),
2313 ..Default::default()
2314 },
2315 ],
2316 });
2317
2318 cx.executor().run_until_parked();
2319 buffer.update(cx, |buffer, _| {
2320 assert_eq!(
2321 buffer
2322 .snapshot()
2323 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2324 .collect::<Vec<_>>(),
2325 &[
2326 DiagnosticEntry {
2327 range: Point::new(2, 21)..Point::new(2, 22),
2328 diagnostic: Diagnostic {
2329 source: Some("disk".into()),
2330 severity: DiagnosticSeverity::WARNING,
2331 message: "undefined variable 'A'".to_string(),
2332 is_disk_based: true,
2333 group_id: 6,
2334 is_primary: true,
2335 source_kind: DiagnosticSourceKind::Pushed,
2336 ..Diagnostic::default()
2337 }
2338 },
2339 DiagnosticEntry {
2340 range: Point::new(3, 9)..Point::new(3, 14),
2341 diagnostic: Diagnostic {
2342 source: Some("disk".into()),
2343 severity: DiagnosticSeverity::ERROR,
2344 message: "undefined variable 'BB'".to_string(),
2345 is_disk_based: true,
2346 group_id: 5,
2347 is_primary: true,
2348 source_kind: DiagnosticSourceKind::Pushed,
2349 ..Diagnostic::default()
2350 },
2351 }
2352 ]
2353 );
2354 });
2355}
2356
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies how zero-width (empty) diagnostic ranges are widened when the
    // buffer is rendered as highlighted chunks, so the diagnostic is visible.
    init_test(cx);

    // Three lines: two with errors at deliberate positions, one clean.
    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Push two diagnostics whose start and end positions are identical
    // (empty ranges), bypassing the language server entirely.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            // Empty range in the middle of line 0 (before `;`).
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            // Empty range at the very end of line 1.
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2432
2433#[gpui::test]
2434async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2435 init_test(cx);
2436
2437 let fs = FakeFs::new(cx.executor());
2438 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2439 .await;
2440
2441 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2442 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2443
2444 lsp_store.update(cx, |lsp_store, cx| {
2445 lsp_store
2446 .update_diagnostic_entries(
2447 LanguageServerId(0),
2448 Path::new("/dir/a.rs").to_owned(),
2449 None,
2450 None,
2451 vec![DiagnosticEntry {
2452 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2453 diagnostic: Diagnostic {
2454 severity: DiagnosticSeverity::ERROR,
2455 is_primary: true,
2456 message: "syntax error a1".to_string(),
2457 source_kind: DiagnosticSourceKind::Pushed,
2458 ..Diagnostic::default()
2459 },
2460 }],
2461 cx,
2462 )
2463 .unwrap();
2464 lsp_store
2465 .update_diagnostic_entries(
2466 LanguageServerId(1),
2467 Path::new("/dir/a.rs").to_owned(),
2468 None,
2469 None,
2470 vec![DiagnosticEntry {
2471 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2472 diagnostic: Diagnostic {
2473 severity: DiagnosticSeverity::ERROR,
2474 is_primary: true,
2475 message: "syntax error b1".to_string(),
2476 source_kind: DiagnosticSourceKind::Pushed,
2477 ..Diagnostic::default()
2478 },
2479 }],
2480 cx,
2481 )
2482 .unwrap();
2483
2484 assert_eq!(
2485 lsp_store.diagnostic_summary(false, cx),
2486 DiagnosticSummary {
2487 error_count: 2,
2488 warning_count: 0,
2489 }
2490 );
2491 });
2492}
2493
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP edits computed against an *older* document version are
    // correctly rebased onto the current buffer contents: the server's edits
    // reference the pre-edit text, but applying the translated edits must land
    // in the right places of the post-edit buffer.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the edits
    // below are resolved against this (now stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    // Translate edits that the server expressed against the old version.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits must preserve the user's intervening edits
    // while still landing the server's changes in the intended locations.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}
2648
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // Verifies that a "large diff that encodes a small change" (overlapping
    // insertions plus a big deletion, as produced by rust-analyzer's
    // merge-imports action) is normalized into a minimal set of edits.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four raw LSP edits should collapse into just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2759
2760#[gpui::test]
2761async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
2762 cx: &mut gpui::TestAppContext,
2763) {
2764 init_test(cx);
2765
2766 let text = "Path()";
2767
2768 let fs = FakeFs::new(cx.executor());
2769 fs.insert_tree(
2770 path!("/dir"),
2771 json!({
2772 "a.rs": text
2773 }),
2774 )
2775 .await;
2776
2777 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2778 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2779 let buffer = project
2780 .update(cx, |project, cx| {
2781 project.open_local_buffer(path!("/dir/a.rs"), cx)
2782 })
2783 .await
2784 .unwrap();
2785
2786 // Simulate the language server sending us a pair of edits at the same location,
2787 // with an insertion following a replacement (which violates the LSP spec).
2788 let edits = lsp_store
2789 .update(cx, |lsp_store, cx| {
2790 lsp_store.as_local_mut().unwrap().edits_from_lsp(
2791 &buffer,
2792 [
2793 lsp::TextEdit {
2794 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
2795 new_text: "Path".into(),
2796 },
2797 lsp::TextEdit {
2798 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2799 new_text: "from path import Path\n\n\n".into(),
2800 },
2801 ],
2802 LanguageServerId(0),
2803 None,
2804 cx,
2805 )
2806 })
2807 .await
2808 .unwrap();
2809
2810 buffer.update(cx, |buffer, cx| {
2811 buffer.edit(edits, None, cx);
2812 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
2813 });
2814}
2815
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    // Verifies that malformed LSP edits — unordered, with inverted ranges and
    // out-of-bounds positions — are sanitized into a valid, minimal edit list.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) precedes start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is past the end of the buffer.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal pair of
        // edits that the well-formed variant of this diff produces.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2922
2923fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2924 buffer: &Buffer,
2925 range: Range<T>,
2926) -> Vec<(String, Option<DiagnosticSeverity>)> {
2927 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2928 for chunk in buffer.snapshot().chunks(range, true) {
2929 if chunks.last().map_or(false, |prev_chunk| {
2930 prev_chunk.1 == chunk.diagnostic_severity
2931 }) {
2932 chunks.last_mut().unwrap().0.push_str(chunk.text);
2933 } else {
2934 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2935 }
2936 }
2937 chunks
2938}
2939
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Go-to-definition into a file outside the project should open it in an
    // invisible worktree that is dropped once the definition is released.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is on disk but outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server resolves the definition to a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs is held in a new, non-visible worktree while the definition
        // (and thus its target buffer) is alive.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree as (absolute path, is_visible).
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3037
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    // A completion item's explicit `text_edit` must take precedence over its
    // `insert_text` and `label` when computing the applied text and range.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the request first; the handler below answers it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        // Replace the trailing "fqn".
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The text_edit's new_text and range win over insert_text/label.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3120
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // When completion items omit `text_edit`, the list-level default
    // `edit_range` supplies the replacement range, and the applied text falls
    // back to `insert_text` and then to `label`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Issue the request first; the handler below answers it.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covering the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text is used, paired with the default edit_range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With neither text_edit nor insert_text, the label itself is used.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3256
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // When the server provides neither a text_edit nor a default edit_range,
    // the replacement range must be inferred from the word around the cursor.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the request first; the handler below answers it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the word "fqn" preceding the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor is just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp" inside the string literal.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3362
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    // Completion text returned by a language server may contain `\r` and
    // `\r\n` line endings; the buffer-facing completion's `new_text` must be
    // normalized to `\n` before it is applied.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript server that advertises completion support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the completion request first; the fake server answers it once the
    // request handler is installed below.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with `insert_text` containing both `\r` and `\r\n` endings.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    // Both `\r` and `\r\n` should have been normalized to `\n`.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3430
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // Exercises the code-action flow for actions that carry a *command*
    // rather than direct edits: the action is resolved, its command is
    // executed, and the edits the server pushes back via a
    // `workspace/applyEdit` request are captured into a project transaction.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Fake TypeScript server with resolvable code actions and one command.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying command data).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3572
3573#[gpui::test(iterations = 10)]
3574async fn test_save_file(cx: &mut gpui::TestAppContext) {
3575 init_test(cx);
3576
3577 let fs = FakeFs::new(cx.executor());
3578 fs.insert_tree(
3579 path!("/dir"),
3580 json!({
3581 "file1": "the old contents",
3582 }),
3583 )
3584 .await;
3585
3586 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3587 let buffer = project
3588 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3589 .await
3590 .unwrap();
3591 buffer.update(cx, |buffer, cx| {
3592 assert_eq!(buffer.text(), "the old contents");
3593 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3594 });
3595
3596 project
3597 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3598 .await
3599 .unwrap();
3600
3601 let new_text = fs
3602 .load(Path::new(path!("/dir/file1")))
3603 .await
3604 .unwrap()
3605 .replace("\r\n", "\n");
3606 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3607}
3608
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Regression test: saving an untitled buffer under a name with a known
    // extension must start the matching language server and register the
    // buffer with it.
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register a fake Rust server; it should only spawn once a Rust file
    // appears in the project.
    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // A brand-new, unsaved buffer has no file, so no server attaches to it.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer under a `.rs` name inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: Arc::from("file.rs".as_ref()),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After saving, the buffer is associated with the new server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
3688
3689#[gpui::test(iterations = 30)]
3690async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3691 init_test(cx);
3692
3693 let fs = FakeFs::new(cx.executor().clone());
3694 fs.insert_tree(
3695 path!("/dir"),
3696 json!({
3697 "file1": "the original contents",
3698 }),
3699 )
3700 .await;
3701
3702 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3703 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3704 let buffer = project
3705 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3706 .await
3707 .unwrap();
3708
3709 // Simulate buffer diffs being slow, so that they don't complete before
3710 // the next file change occurs.
3711 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3712
3713 // Change the buffer's file on disk, and then wait for the file change
3714 // to be detected by the worktree, so that the buffer starts reloading.
3715 fs.save(
3716 path!("/dir/file1").as_ref(),
3717 &"the first contents".into(),
3718 Default::default(),
3719 )
3720 .await
3721 .unwrap();
3722 worktree.next_event(cx).await;
3723
3724 // Change the buffer's file again. Depending on the random seed, the
3725 // previous file change may still be in progress.
3726 fs.save(
3727 path!("/dir/file1").as_ref(),
3728 &"the second contents".into(),
3729 Default::default(),
3730 )
3731 .await
3732 .unwrap();
3733 worktree.next_event(cx).await;
3734
3735 cx.executor().run_until_parked();
3736 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3737 buffer.read_with(cx, |buffer, _| {
3738 assert_eq!(buffer.text(), on_disk_text);
3739 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3740 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3741 });
3742}
3743
3744#[gpui::test(iterations = 30)]
3745async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3746 init_test(cx);
3747
3748 let fs = FakeFs::new(cx.executor().clone());
3749 fs.insert_tree(
3750 path!("/dir"),
3751 json!({
3752 "file1": "the original contents",
3753 }),
3754 )
3755 .await;
3756
3757 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3758 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3759 let buffer = project
3760 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3761 .await
3762 .unwrap();
3763
3764 // Simulate buffer diffs being slow, so that they don't complete before
3765 // the next file change occurs.
3766 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3767
3768 // Change the buffer's file on disk, and then wait for the file change
3769 // to be detected by the worktree, so that the buffer starts reloading.
3770 fs.save(
3771 path!("/dir/file1").as_ref(),
3772 &"the first contents".into(),
3773 Default::default(),
3774 )
3775 .await
3776 .unwrap();
3777 worktree.next_event(cx).await;
3778
3779 cx.executor()
3780 .spawn(cx.executor().simulate_random_delay())
3781 .await;
3782
3783 // Perform a noop edit, causing the buffer's version to increase.
3784 buffer.update(cx, |buffer, cx| {
3785 buffer.edit([(0..0, " ")], None, cx);
3786 buffer.undo(cx);
3787 });
3788
3789 cx.executor().run_until_parked();
3790 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3791 buffer.read_with(cx, |buffer, _| {
3792 let buffer_text = buffer.text();
3793 if buffer_text == on_disk_text {
3794 assert!(
3795 !buffer.is_dirty() && !buffer.has_conflict(),
3796 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3797 );
3798 }
3799 // If the file change occurred while the buffer was processing the first
3800 // change, the buffer will be in a conflicting state.
3801 else {
3802 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3803 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3804 }
3805 });
3806}
3807
3808#[gpui::test]
3809async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3810 init_test(cx);
3811
3812 let fs = FakeFs::new(cx.executor());
3813 fs.insert_tree(
3814 path!("/dir"),
3815 json!({
3816 "file1": "the old contents",
3817 }),
3818 )
3819 .await;
3820
3821 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3822 let buffer = project
3823 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3824 .await
3825 .unwrap();
3826 buffer.update(cx, |buffer, cx| {
3827 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3828 });
3829
3830 project
3831 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3832 .await
3833 .unwrap();
3834
3835 let new_text = fs
3836 .load(Path::new(path!("/dir/file1")))
3837 .await
3838 .unwrap()
3839 .replace("\r\n", "\n");
3840 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3841}
3842
3843#[gpui::test]
3844async fn test_save_as(cx: &mut gpui::TestAppContext) {
3845 init_test(cx);
3846
3847 let fs = FakeFs::new(cx.executor());
3848 fs.insert_tree("/dir", json!({})).await;
3849
3850 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3851
3852 let languages = project.update(cx, |project, _| project.languages().clone());
3853 languages.add(rust_lang());
3854
3855 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3856 buffer.update(cx, |buffer, cx| {
3857 buffer.edit([(0..0, "abc")], None, cx);
3858 assert!(buffer.is_dirty());
3859 assert!(!buffer.has_conflict());
3860 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3861 });
3862 project
3863 .update(cx, |project, cx| {
3864 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3865 let path = ProjectPath {
3866 worktree_id,
3867 path: Arc::from(Path::new("file1.rs")),
3868 };
3869 project.save_buffer_as(buffer.clone(), path, cx)
3870 })
3871 .await
3872 .unwrap();
3873 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3874
3875 cx.executor().run_until_parked();
3876 buffer.update(cx, |buffer, cx| {
3877 assert_eq!(
3878 buffer.file().unwrap().full_path(cx),
3879 Path::new("dir/file1.rs")
3880 );
3881 assert!(!buffer.is_dirty());
3882 assert!(!buffer.has_conflict());
3883 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3884 });
3885
3886 let opened_buffer = project
3887 .update(cx, |project, cx| {
3888 project.open_local_buffer("/dir/file1.rs", cx)
3889 })
3890 .await
3891 .unwrap();
3892 assert_eq!(opened_buffer, buffer);
3893}
3894
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // End-to-end check against the real filesystem: renames and deletions
    // preserve worktree entry ids and update open buffers' paths, and a
    // remote (replica) worktree converges to the same state when the
    // observed update stream is replayed into it.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer at a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so it can be replayed
    // into the remote copy at the end of the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree now reflects all the renames and the deletion.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids are stable across renames, including a moved directory.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers track their files' new paths; the deleted file keeps
        // its last-known path.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        // file5 was removed from disk; the rest still exist.
        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
4060
4061#[gpui::test(iterations = 10)]
4062async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4063 init_test(cx);
4064
4065 let fs = FakeFs::new(cx.executor());
4066 fs.insert_tree(
4067 path!("/dir"),
4068 json!({
4069 "a": {
4070 "file1": "",
4071 }
4072 }),
4073 )
4074 .await;
4075
4076 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4077 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4078 let tree_id = tree.update(cx, |tree, _| tree.id());
4079
4080 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4081 project.update(cx, |project, cx| {
4082 let tree = project.worktrees(cx).next().unwrap();
4083 tree.read(cx)
4084 .entry_for_path(path)
4085 .unwrap_or_else(|| panic!("no entry for path {}", path))
4086 .id
4087 })
4088 };
4089
4090 let dir_id = id_for_path("a", cx);
4091 let file_id = id_for_path("a/file1", cx);
4092 let buffer = project
4093 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
4094 .await
4095 .unwrap();
4096 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4097
4098 project
4099 .update(cx, |project, cx| {
4100 project.rename_entry(dir_id, Path::new("b"), cx)
4101 })
4102 .unwrap()
4103 .await
4104 .to_included()
4105 .unwrap();
4106 cx.executor().run_until_parked();
4107
4108 assert_eq!(id_for_path("b", cx), dir_id);
4109 assert_eq!(id_for_path("b/file1", cx), file_id);
4110 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4111}
4112
4113#[gpui::test]
4114async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4115 init_test(cx);
4116
4117 let fs = FakeFs::new(cx.executor());
4118 fs.insert_tree(
4119 "/dir",
4120 json!({
4121 "a.txt": "a-contents",
4122 "b.txt": "b-contents",
4123 }),
4124 )
4125 .await;
4126
4127 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4128
4129 // Spawn multiple tasks to open paths, repeating some paths.
4130 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4131 (
4132 p.open_local_buffer("/dir/a.txt", cx),
4133 p.open_local_buffer("/dir/b.txt", cx),
4134 p.open_local_buffer("/dir/a.txt", cx),
4135 )
4136 });
4137
4138 let buffer_a_1 = buffer_a_1.await.unwrap();
4139 let buffer_a_2 = buffer_a_2.await.unwrap();
4140 let buffer_b = buffer_b.await.unwrap();
4141 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4142 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4143
4144 // There is only one buffer per path.
4145 let buffer_a_id = buffer_a_1.entity_id();
4146 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4147
4148 // Open the same path again while it is still open.
4149 drop(buffer_a_1);
4150 let buffer_a_3 = project
4151 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4152 .await
4153 .unwrap();
4154
4155 // There's still only one buffer per path.
4156 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4157}
4158
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Exercises the buffer dirty-state machine and the events it emits:
    // editing dirties a buffer, saving cleans it, restoring the saved
    // contents cleans it, and deleting the file on disk interacts with
    // dirtiness in specific ways checked below.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collect all non-operation buffer events for later assertions.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Mark the current version as saved (without a real disk write).
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first edit after a save flips the dirty flag, so the
        // second edit produces an Edited event with no DirtyChanged.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    // Deleting the file only changes the file handle; dirtiness is retained.
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
4340
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // When the file changes on disk, a clean buffer reloads and patches its
    // contents (keeping anchors in place), while a dirty buffer keeps its
    // edits and flags a conflict instead.
    init_test(cx);

    // The ˇ markers yield byte offsets used to create anchors below.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Anchors created before the reload track the corresponding marked
        // positions in the new contents.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4423
4424#[gpui::test]
4425async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4426 init_test(cx);
4427
4428 let fs = FakeFs::new(cx.executor());
4429 fs.insert_tree(
4430 path!("/dir"),
4431 json!({
4432 "file1": "a\nb\nc\n",
4433 "file2": "one\r\ntwo\r\nthree\r\n",
4434 }),
4435 )
4436 .await;
4437
4438 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4439 let buffer1 = project
4440 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4441 .await
4442 .unwrap();
4443 let buffer2 = project
4444 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4445 .await
4446 .unwrap();
4447
4448 buffer1.update(cx, |buffer, _| {
4449 assert_eq!(buffer.text(), "a\nb\nc\n");
4450 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4451 });
4452 buffer2.update(cx, |buffer, _| {
4453 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4454 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4455 });
4456
4457 // Change a file's line endings on disk from unix to windows. The buffer's
4458 // state updates correctly.
4459 fs.save(
4460 path!("/dir/file1").as_ref(),
4461 &"aaa\nb\nc\n".into(),
4462 LineEnding::Windows,
4463 )
4464 .await
4465 .unwrap();
4466 cx.executor().run_until_parked();
4467 buffer1.update(cx, |buffer, _| {
4468 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4469 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4470 });
4471
4472 // Save a file with windows line endings. The file is written correctly.
4473 buffer2.update(cx, |buffer, cx| {
4474 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4475 });
4476 project
4477 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4478 .await
4479 .unwrap();
4480 assert_eq!(
4481 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4482 "one\r\ntwo\r\nthree\r\nfour\r\n",
4483 );
4484}
4485
// Verifies that pushed LSP diagnostics whose `related_information` entries
// reference each other are grouped: each group gets one primary entry plus
// its associated hint entries, all sharing a `group_id`.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Five raw LSP diagnostics: a WARNING ("error 1") with one HINT, and an
    // ERROR ("error 2") with two HINTs. Each hint's related_information points
    // back at its primary (and vice versa), which is what ties them into
    // groups below.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Ingest the diagnostics as pushed (publishDiagnostics-style) state for
    // language server 0.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, in buffer order. "error 2" and its hints share group 0;
    // "error 1" and its hint share group 1. Exactly one entry per group has
    // is_primary set.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 alone: the "error 2" primary plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 alone: the "error 1" primary plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
4745
// Verifies the file-operation protocol around renames: when the server has
// registered willRename/didRename filters, renaming an entry through the
// project sends `workspace/willRenameFiles` first (and applies the returned
// WorkspaceEdit), then sends `workspace/didRenameFiles` afterwards.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server declares interest in renames of individual `.rs` files and of
    // any folder.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    // Fake server advertising both didRename and willRename capabilities with
    // the filters above.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename of one.rs -> three.rs; the returned future resolves
    // only after the will-rename round trip below completes.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server will answer willRenameFiles with.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set once by the willRenameFiles handler so we can assert it actually ran.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    // Handle `workspace/willRenameFiles`: check the old/new URIs and reply
    // with the expected edit.
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive a
    // `workspace/didRenameFiles` notification for the same paths.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4874
// Exercises the two-step LSP rename flow: `textDocument/prepareRename` to
// validate the symbol under the cursor, then `textDocument/rename` whose
// WorkspaceEdit is applied across multiple buffers as one transaction.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Fake server advertising rename support with prepare_provider enabled.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside `ONE`); the server replies with the
    // range of the symbol that would be renamed.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    // Offsets 6..9 cover `ONE` in `const ONE: usize = 1;`.
    assert_eq!(range, 6..9);

    // Perform the rename; the server answers with a WorkspaceEdit that touches
    // both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction maps each edited buffer to its applied edits;
    // both files must now reference THREE.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5014
5015#[gpui::test]
5016async fn test_search(cx: &mut gpui::TestAppContext) {
5017 init_test(cx);
5018
5019 let fs = FakeFs::new(cx.executor());
5020 fs.insert_tree(
5021 path!("/dir"),
5022 json!({
5023 "one.rs": "const ONE: usize = 1;",
5024 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5025 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5026 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5027 }),
5028 )
5029 .await;
5030 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5031 assert_eq!(
5032 search(
5033 &project,
5034 SearchQuery::text(
5035 "TWO",
5036 false,
5037 true,
5038 false,
5039 Default::default(),
5040 Default::default(),
5041 false,
5042 None
5043 )
5044 .unwrap(),
5045 cx
5046 )
5047 .await
5048 .unwrap(),
5049 HashMap::from_iter([
5050 (separator!("dir/two.rs").to_string(), vec![6..9]),
5051 (separator!("dir/three.rs").to_string(), vec![37..40])
5052 ])
5053 );
5054
5055 let buffer_4 = project
5056 .update(cx, |project, cx| {
5057 project.open_local_buffer(path!("/dir/four.rs"), cx)
5058 })
5059 .await
5060 .unwrap();
5061 buffer_4.update(cx, |buffer, cx| {
5062 let text = "two::TWO";
5063 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5064 });
5065
5066 assert_eq!(
5067 search(
5068 &project,
5069 SearchQuery::text(
5070 "TWO",
5071 false,
5072 true,
5073 false,
5074 Default::default(),
5075 Default::default(),
5076 false,
5077 None,
5078 )
5079 .unwrap(),
5080 cx
5081 )
5082 .await
5083 .unwrap(),
5084 HashMap::from_iter([
5085 (separator!("dir/two.rs").to_string(), vec![6..9]),
5086 (separator!("dir/three.rs").to_string(), vec![37..40]),
5087 (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
5088 ])
5089 );
5090}
5091
5092#[gpui::test]
5093async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
5094 init_test(cx);
5095
5096 let search_query = "file";
5097
5098 let fs = FakeFs::new(cx.executor());
5099 fs.insert_tree(
5100 path!("/dir"),
5101 json!({
5102 "one.rs": r#"// Rust file one"#,
5103 "one.ts": r#"// TypeScript file one"#,
5104 "two.rs": r#"// Rust file two"#,
5105 "two.ts": r#"// TypeScript file two"#,
5106 }),
5107 )
5108 .await;
5109 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5110
5111 assert!(
5112 search(
5113 &project,
5114 SearchQuery::text(
5115 search_query,
5116 false,
5117 true,
5118 false,
5119 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5120 Default::default(),
5121 false,
5122 None
5123 )
5124 .unwrap(),
5125 cx
5126 )
5127 .await
5128 .unwrap()
5129 .is_empty(),
5130 "If no inclusions match, no files should be returned"
5131 );
5132
5133 assert_eq!(
5134 search(
5135 &project,
5136 SearchQuery::text(
5137 search_query,
5138 false,
5139 true,
5140 false,
5141 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5142 Default::default(),
5143 false,
5144 None
5145 )
5146 .unwrap(),
5147 cx
5148 )
5149 .await
5150 .unwrap(),
5151 HashMap::from_iter([
5152 (separator!("dir/one.rs").to_string(), vec![8..12]),
5153 (separator!("dir/two.rs").to_string(), vec![8..12]),
5154 ]),
5155 "Rust only search should give only Rust files"
5156 );
5157
5158 assert_eq!(
5159 search(
5160 &project,
5161 SearchQuery::text(
5162 search_query,
5163 false,
5164 true,
5165 false,
5166 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5167 Default::default(),
5168 false,
5169 None,
5170 )
5171 .unwrap(),
5172 cx
5173 )
5174 .await
5175 .unwrap(),
5176 HashMap::from_iter([
5177 (separator!("dir/one.ts").to_string(), vec![14..18]),
5178 (separator!("dir/two.ts").to_string(), vec![14..18]),
5179 ]),
5180 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
5181 );
5182
5183 assert_eq!(
5184 search(
5185 &project,
5186 SearchQuery::text(
5187 search_query,
5188 false,
5189 true,
5190 false,
5191 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5192 .unwrap(),
5193 Default::default(),
5194 false,
5195 None,
5196 )
5197 .unwrap(),
5198 cx
5199 )
5200 .await
5201 .unwrap(),
5202 HashMap::from_iter([
5203 (separator!("dir/two.ts").to_string(), vec![14..18]),
5204 (separator!("dir/one.rs").to_string(), vec![8..12]),
5205 (separator!("dir/one.ts").to_string(), vec![14..18]),
5206 (separator!("dir/two.rs").to_string(), vec![8..12]),
5207 ]),
5208 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
5209 );
5210}
5211
5212#[gpui::test]
5213async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5214 init_test(cx);
5215
5216 let search_query = "file";
5217
5218 let fs = FakeFs::new(cx.executor());
5219 fs.insert_tree(
5220 path!("/dir"),
5221 json!({
5222 "one.rs": r#"// Rust file one"#,
5223 "one.ts": r#"// TypeScript file one"#,
5224 "two.rs": r#"// Rust file two"#,
5225 "two.ts": r#"// TypeScript file two"#,
5226 }),
5227 )
5228 .await;
5229 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5230
5231 assert_eq!(
5232 search(
5233 &project,
5234 SearchQuery::text(
5235 search_query,
5236 false,
5237 true,
5238 false,
5239 Default::default(),
5240 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5241 false,
5242 None,
5243 )
5244 .unwrap(),
5245 cx
5246 )
5247 .await
5248 .unwrap(),
5249 HashMap::from_iter([
5250 (separator!("dir/one.rs").to_string(), vec![8..12]),
5251 (separator!("dir/one.ts").to_string(), vec![14..18]),
5252 (separator!("dir/two.rs").to_string(), vec![8..12]),
5253 (separator!("dir/two.ts").to_string(), vec![14..18]),
5254 ]),
5255 "If no exclusions match, all files should be returned"
5256 );
5257
5258 assert_eq!(
5259 search(
5260 &project,
5261 SearchQuery::text(
5262 search_query,
5263 false,
5264 true,
5265 false,
5266 Default::default(),
5267 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5268 false,
5269 None,
5270 )
5271 .unwrap(),
5272 cx
5273 )
5274 .await
5275 .unwrap(),
5276 HashMap::from_iter([
5277 (separator!("dir/one.ts").to_string(), vec![14..18]),
5278 (separator!("dir/two.ts").to_string(), vec![14..18]),
5279 ]),
5280 "Rust exclusion search should give only TypeScript files"
5281 );
5282
5283 assert_eq!(
5284 search(
5285 &project,
5286 SearchQuery::text(
5287 search_query,
5288 false,
5289 true,
5290 false,
5291 Default::default(),
5292 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5293 false,
5294 None,
5295 )
5296 .unwrap(),
5297 cx
5298 )
5299 .await
5300 .unwrap(),
5301 HashMap::from_iter([
5302 (separator!("dir/one.rs").to_string(), vec![8..12]),
5303 (separator!("dir/two.rs").to_string(), vec![8..12]),
5304 ]),
5305 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5306 );
5307
5308 assert!(
5309 search(
5310 &project,
5311 SearchQuery::text(
5312 search_query,
5313 false,
5314 true,
5315 false,
5316 Default::default(),
5317 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5318 .unwrap(),
5319 false,
5320 None,
5321 )
5322 .unwrap(),
5323 cx
5324 )
5325 .await
5326 .unwrap()
5327 .is_empty(),
5328 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5329 );
5330}
5331
5332#[gpui::test]
5333async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5334 init_test(cx);
5335
5336 let search_query = "file";
5337
5338 let fs = FakeFs::new(cx.executor());
5339 fs.insert_tree(
5340 path!("/dir"),
5341 json!({
5342 "one.rs": r#"// Rust file one"#,
5343 "one.ts": r#"// TypeScript file one"#,
5344 "two.rs": r#"// Rust file two"#,
5345 "two.ts": r#"// TypeScript file two"#,
5346 }),
5347 )
5348 .await;
5349 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5350
5351 assert!(
5352 search(
5353 &project,
5354 SearchQuery::text(
5355 search_query,
5356 false,
5357 true,
5358 false,
5359 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5360 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5361 false,
5362 None,
5363 )
5364 .unwrap(),
5365 cx
5366 )
5367 .await
5368 .unwrap()
5369 .is_empty(),
5370 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5371 );
5372
5373 assert!(
5374 search(
5375 &project,
5376 SearchQuery::text(
5377 search_query,
5378 false,
5379 true,
5380 false,
5381 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5382 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5383 false,
5384 None,
5385 )
5386 .unwrap(),
5387 cx
5388 )
5389 .await
5390 .unwrap()
5391 .is_empty(),
5392 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5393 );
5394
5395 assert!(
5396 search(
5397 &project,
5398 SearchQuery::text(
5399 search_query,
5400 false,
5401 true,
5402 false,
5403 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5404 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5405 false,
5406 None,
5407 )
5408 .unwrap(),
5409 cx
5410 )
5411 .await
5412 .unwrap()
5413 .is_empty(),
5414 "Non-matching inclusions and exclusions should not change that."
5415 );
5416
5417 assert_eq!(
5418 search(
5419 &project,
5420 SearchQuery::text(
5421 search_query,
5422 false,
5423 true,
5424 false,
5425 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5426 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5427 false,
5428 None,
5429 )
5430 .unwrap(),
5431 cx
5432 )
5433 .await
5434 .unwrap(),
5435 HashMap::from_iter([
5436 (separator!("dir/one.ts").to_string(), vec![14..18]),
5437 (separator!("dir/two.ts").to_string(), vec![14..18]),
5438 ]),
5439 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5440 );
5441}
5442
// Searches a project with two worktrees, using inclusion globs that name a
// specific worktree root.
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // NOTE(review): unlike the single-worktree search tests, these first two
    // queries pass `true` for the 7th positional argument, and their globs
    // include the worktree root name ("worktree-a/*.rs") — presumably that
    // flag makes globs match against root-prefixed paths; confirm against
    // SearchQuery::text's signature.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // A plain extension glob (no worktree prefix, flag `false`) matches files
    // in both worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
5540
// Verifies that search skips gitignored directories by default, and includes
// them when the include-ignored flag (4th positional argument) is set.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    // `target` and `node_modules` are gitignored; only the root package.json
    // is tracked content containing the query.
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Default search: ignored directories are skipped.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh Project is created for each subsequent search —
    // presumably to avoid any caching from the previous query; confirm.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // With include-ignored set (4th argument `true`), every file is searched.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include-ignored combined with inclusion/exclusion globs: only the
    // non-TS prettier file survives both filters.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5663
// Searches for Cyrillic text. Match ranges are byte offsets: "привет" is six
// two-byte UTF-8 characters, hence 12-byte-wide ranges like 3..15 below.
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Case-sensitive queries stay plain text queries (asserted below) and only
    // match the exact lowercase form.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![17..29]),
            (separator!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A case-insensitive non-ASCII query is converted into a regex query
    // (asserted below) and matches both cases.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (separator!("dir/two.rs").to_string(), vec![3..15]),
            (separator!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing "." is matched literally here: only "ПРИВЕТ." in two.rs
    // (13-byte range: 12 bytes of text + 1 byte for '.').
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
5747
5748#[gpui::test]
5749async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5750 init_test(cx);
5751
5752 let fs = FakeFs::new(cx.executor().clone());
5753 fs.insert_tree(
5754 "/one/two",
5755 json!({
5756 "three": {
5757 "a.txt": "",
5758 "four": {}
5759 },
5760 "c.rs": ""
5761 }),
5762 )
5763 .await;
5764
5765 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5766 project
5767 .update(cx, |project, cx| {
5768 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5769 project.create_entry((id, "b.."), true, cx)
5770 })
5771 .await
5772 .unwrap()
5773 .to_included()
5774 .unwrap();
5775
5776 // Can't create paths outside the project
5777 let result = project
5778 .update(cx, |project, cx| {
5779 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5780 project.create_entry((id, "../../boop"), true, cx)
5781 })
5782 .await;
5783 assert!(result.is_err());
5784
5785 // Can't create paths with '..'
5786 let result = project
5787 .update(cx, |project, cx| {
5788 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5789 project.create_entry((id, "four/../beep"), true, cx)
5790 })
5791 .await;
5792 assert!(result.is_err());
5793
5794 assert_eq!(
5795 fs.paths(true),
5796 vec![
5797 PathBuf::from(path!("/")),
5798 PathBuf::from(path!("/one")),
5799 PathBuf::from(path!("/one/two")),
5800 PathBuf::from(path!("/one/two/c.rs")),
5801 PathBuf::from(path!("/one/two/three")),
5802 PathBuf::from(path!("/one/two/three/a.txt")),
5803 PathBuf::from(path!("/one/two/three/b..")),
5804 PathBuf::from(path!("/one/two/three/four")),
5805 ]
5806 );
5807
5808 // And we cannot open buffers with '..'
5809 let result = project
5810 .update(cx, |project, cx| {
5811 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5812 project.open_buffer((id, "../c.rs"), cx)
5813 })
5814 .await;
5815 assert!(result.is_err())
5816}
5817
5818#[gpui::test]
5819async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
5820 init_test(cx);
5821
5822 let fs = FakeFs::new(cx.executor());
5823 fs.insert_tree(
5824 path!("/dir"),
5825 json!({
5826 "a.tsx": "a",
5827 }),
5828 )
5829 .await;
5830
5831 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5832
5833 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5834 language_registry.add(tsx_lang());
5835 let language_server_names = [
5836 "TypeScriptServer",
5837 "TailwindServer",
5838 "ESLintServer",
5839 "NoHoverCapabilitiesServer",
5840 ];
5841 let mut language_servers = [
5842 language_registry.register_fake_lsp(
5843 "tsx",
5844 FakeLspAdapter {
5845 name: language_server_names[0],
5846 capabilities: lsp::ServerCapabilities {
5847 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5848 ..lsp::ServerCapabilities::default()
5849 },
5850 ..FakeLspAdapter::default()
5851 },
5852 ),
5853 language_registry.register_fake_lsp(
5854 "tsx",
5855 FakeLspAdapter {
5856 name: language_server_names[1],
5857 capabilities: lsp::ServerCapabilities {
5858 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5859 ..lsp::ServerCapabilities::default()
5860 },
5861 ..FakeLspAdapter::default()
5862 },
5863 ),
5864 language_registry.register_fake_lsp(
5865 "tsx",
5866 FakeLspAdapter {
5867 name: language_server_names[2],
5868 capabilities: lsp::ServerCapabilities {
5869 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5870 ..lsp::ServerCapabilities::default()
5871 },
5872 ..FakeLspAdapter::default()
5873 },
5874 ),
5875 language_registry.register_fake_lsp(
5876 "tsx",
5877 FakeLspAdapter {
5878 name: language_server_names[3],
5879 capabilities: lsp::ServerCapabilities {
5880 hover_provider: None,
5881 ..lsp::ServerCapabilities::default()
5882 },
5883 ..FakeLspAdapter::default()
5884 },
5885 ),
5886 ];
5887
5888 let (buffer, _handle) = project
5889 .update(cx, |p, cx| {
5890 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5891 })
5892 .await
5893 .unwrap();
5894 cx.executor().run_until_parked();
5895
5896 let mut servers_with_hover_requests = HashMap::default();
5897 for i in 0..language_server_names.len() {
5898 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
5899 panic!(
5900 "Failed to get language server #{i} with name {}",
5901 &language_server_names[i]
5902 )
5903 });
5904 let new_server_name = new_server.server.name();
5905 assert!(
5906 !servers_with_hover_requests.contains_key(&new_server_name),
5907 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5908 );
5909 match new_server_name.as_ref() {
5910 "TailwindServer" | "TypeScriptServer" => {
5911 servers_with_hover_requests.insert(
5912 new_server_name.clone(),
5913 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5914 move |_, _| {
5915 let name = new_server_name.clone();
5916 async move {
5917 Ok(Some(lsp::Hover {
5918 contents: lsp::HoverContents::Scalar(
5919 lsp::MarkedString::String(format!("{name} hover")),
5920 ),
5921 range: None,
5922 }))
5923 }
5924 },
5925 ),
5926 );
5927 }
5928 "ESLintServer" => {
5929 servers_with_hover_requests.insert(
5930 new_server_name,
5931 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5932 |_, _| async move { Ok(None) },
5933 ),
5934 );
5935 }
5936 "NoHoverCapabilitiesServer" => {
5937 let _never_handled = new_server
5938 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
5939 panic!(
5940 "Should not call for hovers server with no corresponding capabilities"
5941 )
5942 });
5943 }
5944 unexpected => panic!("Unexpected server name: {unexpected}"),
5945 }
5946 }
5947
5948 let hover_task = project.update(cx, |project, cx| {
5949 project.hover(&buffer, Point::new(0, 0), cx)
5950 });
5951 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
5952 |mut hover_request| async move {
5953 hover_request
5954 .next()
5955 .await
5956 .expect("All hover requests should have been triggered")
5957 },
5958 ))
5959 .await;
5960 assert_eq!(
5961 vec!["TailwindServer hover", "TypeScriptServer hover"],
5962 hover_task
5963 .await
5964 .into_iter()
5965 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5966 .sorted()
5967 .collect::<Vec<_>>(),
5968 "Should receive hover responses from all related servers with hover capabilities"
5969 );
5970}
5971
5972#[gpui::test]
5973async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5974 init_test(cx);
5975
5976 let fs = FakeFs::new(cx.executor());
5977 fs.insert_tree(
5978 path!("/dir"),
5979 json!({
5980 "a.ts": "a",
5981 }),
5982 )
5983 .await;
5984
5985 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5986
5987 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5988 language_registry.add(typescript_lang());
5989 let mut fake_language_servers = language_registry.register_fake_lsp(
5990 "TypeScript",
5991 FakeLspAdapter {
5992 capabilities: lsp::ServerCapabilities {
5993 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5994 ..lsp::ServerCapabilities::default()
5995 },
5996 ..FakeLspAdapter::default()
5997 },
5998 );
5999
6000 let (buffer, _handle) = project
6001 .update(cx, |p, cx| {
6002 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6003 })
6004 .await
6005 .unwrap();
6006 cx.executor().run_until_parked();
6007
6008 let fake_server = fake_language_servers
6009 .next()
6010 .await
6011 .expect("failed to get the language server");
6012
6013 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6014 move |_, _| async move {
6015 Ok(Some(lsp::Hover {
6016 contents: lsp::HoverContents::Array(vec![
6017 lsp::MarkedString::String("".to_string()),
6018 lsp::MarkedString::String(" ".to_string()),
6019 lsp::MarkedString::String("\n\n\n".to_string()),
6020 ]),
6021 range: None,
6022 }))
6023 },
6024 );
6025
6026 let hover_task = project.update(cx, |project, cx| {
6027 project.hover(&buffer, Point::new(0, 0), cx)
6028 });
6029 let () = request_handled
6030 .next()
6031 .await
6032 .expect("All hover requests should have been triggered");
6033 assert_eq!(
6034 Vec::<String>::new(),
6035 hover_task
6036 .await
6037 .into_iter()
6038 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6039 .sorted()
6040 .collect::<Vec<_>>(),
6041 "Empty hover parts should be ignored"
6042 );
6043}
6044
6045#[gpui::test]
6046async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
6047 init_test(cx);
6048
6049 let fs = FakeFs::new(cx.executor());
6050 fs.insert_tree(
6051 path!("/dir"),
6052 json!({
6053 "a.ts": "a",
6054 }),
6055 )
6056 .await;
6057
6058 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6059
6060 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6061 language_registry.add(typescript_lang());
6062 let mut fake_language_servers = language_registry.register_fake_lsp(
6063 "TypeScript",
6064 FakeLspAdapter {
6065 capabilities: lsp::ServerCapabilities {
6066 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6067 ..lsp::ServerCapabilities::default()
6068 },
6069 ..FakeLspAdapter::default()
6070 },
6071 );
6072
6073 let (buffer, _handle) = project
6074 .update(cx, |p, cx| {
6075 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6076 })
6077 .await
6078 .unwrap();
6079 cx.executor().run_until_parked();
6080
6081 let fake_server = fake_language_servers
6082 .next()
6083 .await
6084 .expect("failed to get the language server");
6085
6086 let mut request_handled = fake_server
6087 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
6088 Ok(Some(vec![
6089 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6090 title: "organize imports".to_string(),
6091 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
6092 ..lsp::CodeAction::default()
6093 }),
6094 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6095 title: "fix code".to_string(),
6096 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
6097 ..lsp::CodeAction::default()
6098 }),
6099 ]))
6100 });
6101
6102 let code_actions_task = project.update(cx, |project, cx| {
6103 project.code_actions(
6104 &buffer,
6105 0..buffer.read(cx).len(),
6106 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
6107 cx,
6108 )
6109 });
6110
6111 let () = request_handled
6112 .next()
6113 .await
6114 .expect("The code action request should have been triggered");
6115
6116 let code_actions = code_actions_task.await.unwrap();
6117 assert_eq!(code_actions.len(), 1);
6118 assert_eq!(
6119 code_actions[0].lsp_action.action_kind(),
6120 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
6121 );
6122}
6123
6124#[gpui::test]
6125async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6126 init_test(cx);
6127
6128 let fs = FakeFs::new(cx.executor());
6129 fs.insert_tree(
6130 path!("/dir"),
6131 json!({
6132 "a.tsx": "a",
6133 }),
6134 )
6135 .await;
6136
6137 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6138
6139 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6140 language_registry.add(tsx_lang());
6141 let language_server_names = [
6142 "TypeScriptServer",
6143 "TailwindServer",
6144 "ESLintServer",
6145 "NoActionsCapabilitiesServer",
6146 ];
6147
6148 let mut language_server_rxs = [
6149 language_registry.register_fake_lsp(
6150 "tsx",
6151 FakeLspAdapter {
6152 name: language_server_names[0],
6153 capabilities: lsp::ServerCapabilities {
6154 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6155 ..lsp::ServerCapabilities::default()
6156 },
6157 ..FakeLspAdapter::default()
6158 },
6159 ),
6160 language_registry.register_fake_lsp(
6161 "tsx",
6162 FakeLspAdapter {
6163 name: language_server_names[1],
6164 capabilities: lsp::ServerCapabilities {
6165 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6166 ..lsp::ServerCapabilities::default()
6167 },
6168 ..FakeLspAdapter::default()
6169 },
6170 ),
6171 language_registry.register_fake_lsp(
6172 "tsx",
6173 FakeLspAdapter {
6174 name: language_server_names[2],
6175 capabilities: lsp::ServerCapabilities {
6176 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6177 ..lsp::ServerCapabilities::default()
6178 },
6179 ..FakeLspAdapter::default()
6180 },
6181 ),
6182 language_registry.register_fake_lsp(
6183 "tsx",
6184 FakeLspAdapter {
6185 name: language_server_names[3],
6186 capabilities: lsp::ServerCapabilities {
6187 code_action_provider: None,
6188 ..lsp::ServerCapabilities::default()
6189 },
6190 ..FakeLspAdapter::default()
6191 },
6192 ),
6193 ];
6194
6195 let (buffer, _handle) = project
6196 .update(cx, |p, cx| {
6197 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6198 })
6199 .await
6200 .unwrap();
6201 cx.executor().run_until_parked();
6202
6203 let mut servers_with_actions_requests = HashMap::default();
6204 for i in 0..language_server_names.len() {
6205 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6206 panic!(
6207 "Failed to get language server #{i} with name {}",
6208 &language_server_names[i]
6209 )
6210 });
6211 let new_server_name = new_server.server.name();
6212
6213 assert!(
6214 !servers_with_actions_requests.contains_key(&new_server_name),
6215 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6216 );
6217 match new_server_name.0.as_ref() {
6218 "TailwindServer" | "TypeScriptServer" => {
6219 servers_with_actions_requests.insert(
6220 new_server_name.clone(),
6221 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6222 move |_, _| {
6223 let name = new_server_name.clone();
6224 async move {
6225 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6226 lsp::CodeAction {
6227 title: format!("{name} code action"),
6228 ..lsp::CodeAction::default()
6229 },
6230 )]))
6231 }
6232 },
6233 ),
6234 );
6235 }
6236 "ESLintServer" => {
6237 servers_with_actions_requests.insert(
6238 new_server_name,
6239 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6240 |_, _| async move { Ok(None) },
6241 ),
6242 );
6243 }
6244 "NoActionsCapabilitiesServer" => {
6245 let _never_handled = new_server
6246 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6247 panic!(
6248 "Should not call for code actions server with no corresponding capabilities"
6249 )
6250 });
6251 }
6252 unexpected => panic!("Unexpected server name: {unexpected}"),
6253 }
6254 }
6255
6256 let code_actions_task = project.update(cx, |project, cx| {
6257 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6258 });
6259
6260 // cx.run_until_parked();
6261 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6262 |mut code_actions_request| async move {
6263 code_actions_request
6264 .next()
6265 .await
6266 .expect("All code actions requests should have been triggered")
6267 },
6268 ))
6269 .await;
6270 assert_eq!(
6271 vec!["TailwindServer code action", "TypeScriptServer code action"],
6272 code_actions_task
6273 .await
6274 .unwrap()
6275 .into_iter()
6276 .map(|code_action| code_action.lsp_action.title().to_owned())
6277 .sorted()
6278 .collect::<Vec<_>>(),
6279 "Should receive code actions responses from all related servers with hover capabilities"
6280 );
6281}
6282
6283#[gpui::test]
6284async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6285 init_test(cx);
6286
6287 let fs = FakeFs::new(cx.executor());
6288 fs.insert_tree(
6289 "/dir",
6290 json!({
6291 "a.rs": "let a = 1;",
6292 "b.rs": "let b = 2;",
6293 "c.rs": "let c = 2;",
6294 }),
6295 )
6296 .await;
6297
6298 let project = Project::test(
6299 fs,
6300 [
6301 "/dir/a.rs".as_ref(),
6302 "/dir/b.rs".as_ref(),
6303 "/dir/c.rs".as_ref(),
6304 ],
6305 cx,
6306 )
6307 .await;
6308
6309 // check the initial state and get the worktrees
6310 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6311 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6312 assert_eq!(worktrees.len(), 3);
6313
6314 let worktree_a = worktrees[0].read(cx);
6315 let worktree_b = worktrees[1].read(cx);
6316 let worktree_c = worktrees[2].read(cx);
6317
6318 // check they start in the right order
6319 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6320 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6321 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6322
6323 (
6324 worktrees[0].clone(),
6325 worktrees[1].clone(),
6326 worktrees[2].clone(),
6327 )
6328 });
6329
6330 // move first worktree to after the second
6331 // [a, b, c] -> [b, a, c]
6332 project
6333 .update(cx, |project, cx| {
6334 let first = worktree_a.read(cx);
6335 let second = worktree_b.read(cx);
6336 project.move_worktree(first.id(), second.id(), cx)
6337 })
6338 .expect("moving first after second");
6339
6340 // check the state after moving
6341 project.update(cx, |project, cx| {
6342 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6343 assert_eq!(worktrees.len(), 3);
6344
6345 let first = worktrees[0].read(cx);
6346 let second = worktrees[1].read(cx);
6347 let third = worktrees[2].read(cx);
6348
6349 // check they are now in the right order
6350 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6351 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6352 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6353 });
6354
6355 // move the second worktree to before the first
6356 // [b, a, c] -> [a, b, c]
6357 project
6358 .update(cx, |project, cx| {
6359 let second = worktree_a.read(cx);
6360 let first = worktree_b.read(cx);
6361 project.move_worktree(first.id(), second.id(), cx)
6362 })
6363 .expect("moving second before first");
6364
6365 // check the state after moving
6366 project.update(cx, |project, cx| {
6367 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6368 assert_eq!(worktrees.len(), 3);
6369
6370 let first = worktrees[0].read(cx);
6371 let second = worktrees[1].read(cx);
6372 let third = worktrees[2].read(cx);
6373
6374 // check they are now in the right order
6375 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6376 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6377 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6378 });
6379
6380 // move the second worktree to after the third
6381 // [a, b, c] -> [a, c, b]
6382 project
6383 .update(cx, |project, cx| {
6384 let second = worktree_b.read(cx);
6385 let third = worktree_c.read(cx);
6386 project.move_worktree(second.id(), third.id(), cx)
6387 })
6388 .expect("moving second after third");
6389
6390 // check the state after moving
6391 project.update(cx, |project, cx| {
6392 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6393 assert_eq!(worktrees.len(), 3);
6394
6395 let first = worktrees[0].read(cx);
6396 let second = worktrees[1].read(cx);
6397 let third = worktrees[2].read(cx);
6398
6399 // check they are now in the right order
6400 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6401 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6402 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6403 });
6404
6405 // move the third worktree to before the second
6406 // [a, c, b] -> [a, b, c]
6407 project
6408 .update(cx, |project, cx| {
6409 let third = worktree_c.read(cx);
6410 let second = worktree_b.read(cx);
6411 project.move_worktree(third.id(), second.id(), cx)
6412 })
6413 .expect("moving third before second");
6414
6415 // check the state after moving
6416 project.update(cx, |project, cx| {
6417 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6418 assert_eq!(worktrees.len(), 3);
6419
6420 let first = worktrees[0].read(cx);
6421 let second = worktrees[1].read(cx);
6422 let third = worktrees[2].read(cx);
6423
6424 // check they are now in the right order
6425 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6426 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6427 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6428 });
6429
6430 // move the first worktree to after the third
6431 // [a, b, c] -> [b, c, a]
6432 project
6433 .update(cx, |project, cx| {
6434 let first = worktree_a.read(cx);
6435 let third = worktree_c.read(cx);
6436 project.move_worktree(first.id(), third.id(), cx)
6437 })
6438 .expect("moving first after third");
6439
6440 // check the state after moving
6441 project.update(cx, |project, cx| {
6442 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6443 assert_eq!(worktrees.len(), 3);
6444
6445 let first = worktrees[0].read(cx);
6446 let second = worktrees[1].read(cx);
6447 let third = worktrees[2].read(cx);
6448
6449 // check they are now in the right order
6450 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6451 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6452 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6453 });
6454
6455 // move the third worktree to before the first
6456 // [b, c, a] -> [a, b, c]
6457 project
6458 .update(cx, |project, cx| {
6459 let third = worktree_a.read(cx);
6460 let first = worktree_b.read(cx);
6461 project.move_worktree(third.id(), first.id(), cx)
6462 })
6463 .expect("moving third before first");
6464
6465 // check the state after moving
6466 project.update(cx, |project, cx| {
6467 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6468 assert_eq!(worktrees.len(), 3);
6469
6470 let first = worktrees[0].read(cx);
6471 let second = worktrees[1].read(cx);
6472 let third = worktrees[2].read(cx);
6473
6474 // check they are now in the right order
6475 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6476 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6477 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6478 });
6479}
6480
6481#[gpui::test]
6482async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
6483 init_test(cx);
6484
6485 let staged_contents = r#"
6486 fn main() {
6487 println!("hello world");
6488 }
6489 "#
6490 .unindent();
6491 let file_contents = r#"
6492 // print goodbye
6493 fn main() {
6494 println!("goodbye world");
6495 }
6496 "#
6497 .unindent();
6498
6499 let fs = FakeFs::new(cx.background_executor.clone());
6500 fs.insert_tree(
6501 "/dir",
6502 json!({
6503 ".git": {},
6504 "src": {
6505 "main.rs": file_contents,
6506 }
6507 }),
6508 )
6509 .await;
6510
6511 fs.set_index_for_repo(
6512 Path::new("/dir/.git"),
6513 &[("src/main.rs".into(), staged_contents)],
6514 );
6515
6516 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6517
6518 let buffer = project
6519 .update(cx, |project, cx| {
6520 project.open_local_buffer("/dir/src/main.rs", cx)
6521 })
6522 .await
6523 .unwrap();
6524 let unstaged_diff = project
6525 .update(cx, |project, cx| {
6526 project.open_unstaged_diff(buffer.clone(), cx)
6527 })
6528 .await
6529 .unwrap();
6530
6531 cx.run_until_parked();
6532 unstaged_diff.update(cx, |unstaged_diff, cx| {
6533 let snapshot = buffer.read(cx).snapshot();
6534 assert_hunks(
6535 unstaged_diff.hunks(&snapshot, cx),
6536 &snapshot,
6537 &unstaged_diff.base_text_string().unwrap(),
6538 &[
6539 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
6540 (
6541 2..3,
6542 " println!(\"hello world\");\n",
6543 " println!(\"goodbye world\");\n",
6544 DiffHunkStatus::modified_none(),
6545 ),
6546 ],
6547 );
6548 });
6549
6550 let staged_contents = r#"
6551 // print goodbye
6552 fn main() {
6553 }
6554 "#
6555 .unindent();
6556
6557 fs.set_index_for_repo(
6558 Path::new("/dir/.git"),
6559 &[("src/main.rs".into(), staged_contents)],
6560 );
6561
6562 cx.run_until_parked();
6563 unstaged_diff.update(cx, |unstaged_diff, cx| {
6564 let snapshot = buffer.read(cx).snapshot();
6565 assert_hunks(
6566 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
6567 &snapshot,
6568 &unstaged_diff.base_text().text(),
6569 &[(
6570 2..3,
6571 "",
6572 " println!(\"goodbye world\");\n",
6573 DiffHunkStatus::added_none(),
6574 )],
6575 );
6576 });
6577}
6578
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: committed (HEAD), staged (index), and
    // on disk. The uncommitted diff compares the buffer against HEAD, while
    // the secondary hunk status reflects whether a change is also unstaged.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // deletion.rs exists in HEAD and the index but not on disk, so it will
    // later show up as a deleted-file diff.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment is not yet staged (HasSecondaryHunk), while the
    // println! change already matches the index (no secondary marker).
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is not yet staged, so the hunk carries a secondary marker.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once staged, the same deletion hunk loses its secondary marker.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6758
6759#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD deletes "zero" and differs on "two"/"four" relative to the working
    // copy, producing three hunks: one deletion and two modifications.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index start out identical, so every hunk begins unstaged.
    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's event stream so the ordering of
    // staged/changed notifications can be asserted below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged
    // (SecondaryHunkRemovalPending) before the index write lands.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It is optimistically marked pending even though the
    // underlying index write is doomed to fail.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // A change event is emitted for the rolled-back index state.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7098
// NOTE(review): the explicit seeds presumably reproduce a previously failing
// scheduler interleaving — confirm before removing them.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as `test_staging_hunks`: a deletion plus two modifications.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events so that index writes complete without the corresponding
    // file-change notifications being delivered yet.
    fs.pause_events();

    // Stage the first hunk. Its status becomes pending immediately.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    // The first hunk must remain pending, not revert to unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO, then deliver every buffered FS event.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7292
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; defaults to 20, overridable
    // via the OPERATIONS environment variable.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.gen_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line of 30 is modified in the working copy, producing six
    // single-line modification hunks against HEAD (which matches the index).
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    // Randomly stage or unstage hunks, tracking the expected pending status
    // locally in `hunks` so it can be compared against the diff at the end.
    for _i in 0..operations {
        let hunk_ix = rng.gen_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times to shuffle task interleavings.
        for _ in 0..rng.gen_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once all IO has settled, each pending status should have resolved to
    // its target state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7411
7412#[gpui::test]
7413async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7414 init_test(cx);
7415
7416 let committed_contents = r#"
7417 fn main() {
7418 println!("hello from HEAD");
7419 }
7420 "#
7421 .unindent();
7422 let file_contents = r#"
7423 fn main() {
7424 println!("hello from the working copy");
7425 }
7426 "#
7427 .unindent();
7428
7429 let fs = FakeFs::new(cx.background_executor.clone());
7430 fs.insert_tree(
7431 "/dir",
7432 json!({
7433 ".git": {},
7434 "src": {
7435 "main.rs": file_contents,
7436 }
7437 }),
7438 )
7439 .await;
7440
7441 fs.set_head_for_repo(
7442 Path::new("/dir/.git"),
7443 &[("src/main.rs".into(), committed_contents.clone())],
7444 "deadbeef",
7445 );
7446 fs.set_index_for_repo(
7447 Path::new("/dir/.git"),
7448 &[("src/main.rs".into(), committed_contents.clone())],
7449 );
7450
7451 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7452
7453 let buffer = project
7454 .update(cx, |project, cx| {
7455 project.open_local_buffer("/dir/src/main.rs", cx)
7456 })
7457 .await
7458 .unwrap();
7459 let uncommitted_diff = project
7460 .update(cx, |project, cx| {
7461 project.open_uncommitted_diff(buffer.clone(), cx)
7462 })
7463 .await
7464 .unwrap();
7465
7466 cx.run_until_parked();
7467 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7468 let snapshot = buffer.read(cx).snapshot();
7469 assert_hunks(
7470 uncommitted_diff.hunks(&snapshot, cx),
7471 &snapshot,
7472 &uncommitted_diff.base_text_string().unwrap(),
7473 &[(
7474 1..2,
7475 " println!(\"hello from HEAD\");\n",
7476 " println!(\"hello from the working copy\");\n",
7477 DiffHunkStatus {
7478 kind: DiffHunkStatusKind::Modified,
7479 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7480 },
7481 )],
7482 );
7483 });
7484}
7485
7486#[gpui::test]
7487async fn test_repository_and_path_for_project_path(
7488 background_executor: BackgroundExecutor,
7489 cx: &mut gpui::TestAppContext,
7490) {
7491 init_test(cx);
7492 let fs = FakeFs::new(background_executor);
7493 fs.insert_tree(
7494 path!("/root"),
7495 json!({
7496 "c.txt": "",
7497 "dir1": {
7498 ".git": {},
7499 "deps": {
7500 "dep1": {
7501 ".git": {},
7502 "src": {
7503 "a.txt": ""
7504 }
7505 }
7506 },
7507 "src": {
7508 "b.txt": ""
7509 }
7510 },
7511 }),
7512 )
7513 .await;
7514
7515 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7516 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7517 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7518 project
7519 .update(cx, |project, cx| project.git_scans_complete(cx))
7520 .await;
7521 cx.run_until_parked();
7522
7523 project.read_with(cx, |project, cx| {
7524 let git_store = project.git_store().read(cx);
7525 let pairs = [
7526 ("c.txt", None),
7527 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
7528 (
7529 "dir1/deps/dep1/src/a.txt",
7530 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
7531 ),
7532 ];
7533 let expected = pairs
7534 .iter()
7535 .map(|(path, result)| {
7536 (
7537 path,
7538 result.map(|(repo, repo_path)| {
7539 (Path::new(repo).into(), RepoPath::from(repo_path))
7540 }),
7541 )
7542 })
7543 .collect::<Vec<_>>();
7544 let actual = pairs
7545 .iter()
7546 .map(|(path, _)| {
7547 let project_path = (tree_id, Path::new(path)).into();
7548 let result = maybe!({
7549 let (repo, repo_path) =
7550 git_store.repository_and_path_for_project_path(&project_path, cx)?;
7551 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
7552 });
7553 (path, result)
7554 })
7555 .collect::<Vec<_>>();
7556 pretty_assertions::assert_eq!(expected, actual);
7557 });
7558
7559 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
7560 .await
7561 .unwrap();
7562 cx.run_until_parked();
7563
7564 project.read_with(cx, |project, cx| {
7565 let git_store = project.git_store().read(cx);
7566 assert_eq!(
7567 git_store.repository_and_path_for_project_path(
7568 &(tree_id, Path::new("dir1/src/b.txt")).into(),
7569 cx
7570 ),
7571 None
7572 );
7573 });
7574}
7575
7576#[gpui::test]
7577async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7578 init_test(cx);
7579 let fs = FakeFs::new(cx.background_executor.clone());
7580 fs.insert_tree(
7581 path!("/root"),
7582 json!({
7583 "home": {
7584 ".git": {},
7585 "project": {
7586 "a.txt": "A"
7587 },
7588 },
7589 }),
7590 )
7591 .await;
7592 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7593
7594 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7595 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7596 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7597
7598 project
7599 .update(cx, |project, cx| project.git_scans_complete(cx))
7600 .await;
7601 tree.flush_fs_events(cx).await;
7602
7603 project.read_with(cx, |project, cx| {
7604 let containing = project
7605 .git_store()
7606 .read(cx)
7607 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7608 assert!(containing.is_none());
7609 });
7610
7611 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7612 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7613 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7614 project
7615 .update(cx, |project, cx| project.git_scans_complete(cx))
7616 .await;
7617 tree.flush_fs_events(cx).await;
7618
7619 project.read_with(cx, |project, cx| {
7620 let containing = project
7621 .git_store()
7622 .read(cx)
7623 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7624 assert_eq!(
7625 containing
7626 .unwrap()
7627 .0
7628 .read(cx)
7629 .work_directory_abs_path
7630 .as_ref(),
7631 Path::new(path!("/root/home"))
7632 );
7633 });
7634}
7635
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem and a real git repository, so blocking IO is
    // permitted on this executor.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup:
    // a.txt modified, b.txt untracked, d.txt deleted; c.txt clean (absent).
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a tracked, previously clean file; it should now appear modified.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit all outstanding changes, then delete one tracked (a.txt) and one
    // untracked (b.txt) file.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7765
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem + real git; blocking IO allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick out the outer repository (the nested `sub` repo also exists).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
7828
7829#[gpui::test]
7830async fn test_repository_subfolder_git_status(
7831 executor: gpui::BackgroundExecutor,
7832 cx: &mut gpui::TestAppContext,
7833) {
7834 init_test(cx);
7835
7836 let fs = FakeFs::new(executor);
7837 fs.insert_tree(
7838 path!("/root"),
7839 json!({
7840 "my-repo": {
7841 ".git": {},
7842 "a.txt": "a",
7843 "sub-folder-1": {
7844 "sub-folder-2": {
7845 "c.txt": "cc",
7846 "d": {
7847 "e.txt": "eee"
7848 }
7849 },
7850 }
7851 },
7852 }),
7853 )
7854 .await;
7855
7856 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
7857 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
7858
7859 fs.set_status_for_repo(
7860 path!("/root/my-repo/.git").as_ref(),
7861 &[(E_TXT.as_ref(), FileStatus::Untracked)],
7862 );
7863
7864 let project = Project::test(
7865 fs.clone(),
7866 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
7867 cx,
7868 )
7869 .await;
7870
7871 project
7872 .update(cx, |project, cx| project.git_scans_complete(cx))
7873 .await;
7874 cx.run_until_parked();
7875
7876 let repository = project.read_with(cx, |project, cx| {
7877 project.repositories(cx).values().next().unwrap().clone()
7878 });
7879
7880 // Ensure that the git status is loaded correctly
7881 repository.read_with(cx, |repository, _cx| {
7882 assert_eq!(
7883 repository.work_directory_abs_path,
7884 Path::new(path!("/root/my-repo")).into()
7885 );
7886
7887 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
7888 assert_eq!(
7889 repository.status_for_path(&E_TXT.into()).unwrap().status,
7890 FileStatus::Untracked
7891 );
7892 });
7893
7894 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
7895 project
7896 .update(cx, |project, cx| project.git_scans_complete(cx))
7897 .await;
7898 cx.run_until_parked();
7899
7900 repository.read_with(cx, |repository, _cx| {
7901 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
7902 assert_eq!(repository.status_for_path(&E_TXT.into()), None);
7903 });
7904}
7905
7906// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// `cfg(any())` with no predicates is always false, so this test is currently
// compiled out (see the flakiness TODO above).
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem + real git; blocking IO allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create conflicting edits to a.txt on two branches, then cherry-pick one
    // onto the other to produce a conflicted CHERRY_PICK state.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository should report a.txt as a merge conflict.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Resolving the cherry-pick should clear the recorded conflicts.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
7988
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    // Initially `*.txt` is ignored: a.xml is tracked and clean, b.txt ignored.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    // After the ignore flip: a.xml becomes ignored, b.txt shows as added.
    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8056
8057// NOTE:
8058// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
8059// a directory which some program has already open.
// This is a limitation of Windows.
8061// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// Verifies that renaming a repository's work directory on disk is tracked:
// the repository's `work_directory_abs_path` is updated and per-file git
// statuses survive the rename.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit "a" so it can later show as modified; "b" stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: repo rooted at project1, "a" modified, "b" untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // After the rename, the same statuses are reported under the new root.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8137
8138// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
8139// you can't rename a directory which some program has already open. This is a
// limitation of Windows. See:
8141// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// End-to-end check of git status tracking against a real repository on disk:
// untracked files, modifications, commits, resets, stashes, ignore rules,
// file/directory deletion, and directory renames.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        // Committed files have no status entry at all.
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // The stash reverted A's modification; B was un-staged by the reset +
        // index removal, so it is untracked again.
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete a file and a whole directory, and extend the ignore rules.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A new file inside a freshly created nested directory shows as untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    // Renaming the parent directory keeps the file's status under its new path.
    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8339
// Verifies that adding an invisible (non-project) worktree does not cause its
// containing repository to be reported: only repositories for visible
// worktrees appear in `Project::repositories`.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Only dep1 is opened as a visible worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Opening b.txt creates an invisible single-file worktree inside dir1.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The dir1 repository must still not be reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8401
// Verifies that rescans correctly combine ignore rules (including ones from
// an ancestor .gitignore outside the repository) with index state for newly
// created files.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so ignored entries are still scanned.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: the ancestor .gitignore lies outside the repo, so the
    // file it names is not treated as ignored here.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files in tracked and ignored locations, staging only the
    // tracked one.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8537
// Verifies that linked git worktrees (`.git` file with a `gitdir:` pointer
// into `.git/worktrees/...`) and submodules (`gitdir:` into `.git/modules/...`)
// are each discovered as separate repositories and refreshed on git events.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories (main, linked worktree, submodule) are found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, not the
    // main one; the barrier ensures pending repository work has completed.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
8687
// Verifies that two worktrees living inside the same git repository produce a
// single deduplicated repository entry.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repository as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository is reported despite two worktrees.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
8734
8735async fn search(
8736 project: &Entity<Project>,
8737 query: SearchQuery,
8738 cx: &mut gpui::TestAppContext,
8739) -> Result<HashMap<String, Vec<Range<usize>>>> {
8740 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8741 let mut results = HashMap::default();
8742 while let Ok(search_result) = search_rx.recv().await {
8743 match search_result {
8744 SearchResult::Buffer { buffer, ranges } => {
8745 results.entry(buffer).or_insert(ranges);
8746 }
8747 SearchResult::LimitReached => {}
8748 }
8749 }
8750 Ok(results
8751 .into_iter()
8752 .map(|(buffer, ranges)| {
8753 buffer.update(cx, |buffer, cx| {
8754 let path = buffer
8755 .file()
8756 .unwrap()
8757 .full_path(cx)
8758 .to_string_lossy()
8759 .to_string();
8760 let ranges = ranges
8761 .into_iter()
8762 .map(|range| range.to_offset(buffer))
8763 .collect::<Vec<_>>();
8764 (path, ranges)
8765 })
8766 })
8767 .collect())
8768}
8769
8770pub fn init_test(cx: &mut gpui::TestAppContext) {
8771 zlog::init_test();
8772
8773 cx.update(|cx| {
8774 let settings_store = SettingsStore::test(cx);
8775 cx.set_global(settings_store);
8776 release_channel::init(SemanticVersion::default(), cx);
8777 language::init(cx);
8778 Project::init_settings(cx);
8779 });
8780}
8781
8782fn json_lang() -> Arc<Language> {
8783 Arc::new(Language::new(
8784 LanguageConfig {
8785 name: "JSON".into(),
8786 matcher: LanguageMatcher {
8787 path_suffixes: vec!["json".to_string()],
8788 ..Default::default()
8789 },
8790 ..Default::default()
8791 },
8792 None,
8793 ))
8794}
8795
8796fn js_lang() -> Arc<Language> {
8797 Arc::new(Language::new(
8798 LanguageConfig {
8799 name: "JavaScript".into(),
8800 matcher: LanguageMatcher {
8801 path_suffixes: vec!["js".to_string()],
8802 ..Default::default()
8803 },
8804 ..Default::default()
8805 },
8806 None,
8807 ))
8808}
8809
8810fn rust_lang() -> Arc<Language> {
8811 Arc::new(Language::new(
8812 LanguageConfig {
8813 name: "Rust".into(),
8814 matcher: LanguageMatcher {
8815 path_suffixes: vec!["rs".to_string()],
8816 ..Default::default()
8817 },
8818 ..Default::default()
8819 },
8820 Some(tree_sitter_rust::LANGUAGE.into()),
8821 ))
8822}
8823
8824fn typescript_lang() -> Arc<Language> {
8825 Arc::new(Language::new(
8826 LanguageConfig {
8827 name: "TypeScript".into(),
8828 matcher: LanguageMatcher {
8829 path_suffixes: vec!["ts".to_string()],
8830 ..Default::default()
8831 },
8832 ..Default::default()
8833 },
8834 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8835 ))
8836}
8837
8838fn tsx_lang() -> Arc<Language> {
8839 Arc::new(Language::new(
8840 LanguageConfig {
8841 name: "tsx".into(),
8842 matcher: LanguageMatcher {
8843 path_suffixes: vec!["tsx".to_string()],
8844 ..Default::default()
8845 },
8846 ..Default::default()
8847 },
8848 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8849 ))
8850}
8851
8852fn get_all_tasks(
8853 project: &Entity<Project>,
8854 task_contexts: &TaskContexts,
8855 cx: &mut App,
8856) -> Vec<(TaskSourceKind, ResolvedTask)> {
8857 let (mut old, new) = project.update(cx, |project, cx| {
8858 project
8859 .task_store
8860 .read(cx)
8861 .task_inventory()
8862 .unwrap()
8863 .read(cx)
8864 .used_and_current_resolved_tasks(task_contexts, cx)
8865 });
8866 old.extend(new);
8867 old
8868}
8869
8870#[track_caller]
8871fn assert_entry_git_state(
8872 tree: &Worktree,
8873 repository: &Repository,
8874 path: &str,
8875 index_status: Option<StatusCode>,
8876 is_ignored: bool,
8877) {
8878 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
8879 let entry = tree
8880 .entry_for_path(path)
8881 .unwrap_or_else(|| panic!("entry {path} not found"));
8882 let status = repository
8883 .status_for_path(&path.into())
8884 .map(|entry| entry.status);
8885 let expected = index_status.map(|index_status| {
8886 TrackedStatus {
8887 index_status,
8888 worktree_status: StatusCode::Unmodified,
8889 }
8890 .into()
8891 });
8892 assert_eq!(
8893 status, expected,
8894 "expected {path} to have git status: {expected:?}"
8895 );
8896 assert_eq!(
8897 entry.is_ignored, is_ignored,
8898 "expected {path} to have is_ignored: {is_ignored}"
8899 );
8900}
8901
8902#[track_caller]
8903fn git_init(path: &Path) -> git2::Repository {
8904 let mut init_opts = RepositoryInitOptions::new();
8905 init_opts.initial_head("main");
8906 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
8907}
8908
8909#[track_caller]
8910fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
8911 let path = path.as_ref();
8912 let mut index = repo.index().expect("Failed to get index");
8913 index.add_path(path).expect("Failed to add file");
8914 index.write().expect("Failed to write index");
8915}
8916
8917#[track_caller]
8918fn git_remove_index(path: &Path, repo: &git2::Repository) {
8919 let mut index = repo.index().expect("Failed to get index");
8920 index.remove_path(path).expect("Failed to add file");
8921 index.write().expect("Failed to write index");
8922}
8923
8924#[track_caller]
8925fn git_commit(msg: &'static str, repo: &git2::Repository) {
8926 use git2::Signature;
8927
8928 let signature = Signature::now("test", "test@zed.dev").unwrap();
8929 let oid = repo.index().unwrap().write_tree().unwrap();
8930 let tree = repo.find_tree(oid).unwrap();
8931 if let Ok(head) = repo.head() {
8932 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
8933
8934 let parent_commit = parent_obj.as_commit().unwrap();
8935
8936 repo.commit(
8937 Some("HEAD"),
8938 &signature,
8939 &signature,
8940 msg,
8941 &tree,
8942 &[parent_commit],
8943 )
8944 .expect("Failed to commit with parent");
8945 } else {
8946 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
8947 .expect("Failed to commit");
8948 }
8949}
8950
// Cherry-picks `commit` onto the current HEAD. Currently compiled out via
// `#[cfg(any())]`; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
8956
8957#[track_caller]
8958fn git_stash(repo: &mut git2::Repository) {
8959 use git2::Signature;
8960
8961 let signature = Signature::now("test", "test@zed.dev").unwrap();
8962 repo.stash_save(&signature, "N/A", None)
8963 .expect("Failed to stash");
8964}
8965
8966#[track_caller]
8967fn git_reset(offset: usize, repo: &git2::Repository) {
8968 let head = repo.head().expect("Couldn't get repo head");
8969 let object = head.peel(git2::ObjectType::Commit).unwrap();
8970 let commit = object.as_commit().unwrap();
8971 let new_head = commit
8972 .parents()
8973 .inspect(|parnet| {
8974 parnet.message();
8975 })
8976 .nth(offset)
8977 .expect("Not enough history");
8978 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
8979 .expect("Could not reset");
8980}
8981
// Creates branch `name` pointing at the current HEAD commit. Currently
// compiled out via `#[cfg(any())]`; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fix: the expect message previously said "Failed to commit",
    // copy-pasted from `git_commit`.
    repo.branch(name, &head, false).expect("Failed to create branch");
}
8992
// Points HEAD at `name` and checks it out into the working tree. Currently
// compiled out via `#[cfg(any())]`; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
8999
// Returns the repository's status as a map from path to git2 status flags.
// Currently compiled out via `#[cfg(any())]`; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}
9009
// Verifies `Project::find_project_path` resolution for absolute paths: paths
// inside either worktree (existing or not) resolve to that worktree, while
// paths outside all worktrees resolve to `None`.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // Resolution is purely path-based: a nonexistent file inside a
        // worktree still yields a project path.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}