1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
9 DiffHunkStatusKind, assert_hunks,
10};
11use fs::FakeFs;
12use futures::{StreamExt, future};
13use git::{
14 GitHostingProviderRegistry,
15 repository::RepoPath,
16 status::{StatusCode, TrackedStatus},
17};
18use git2::RepositoryInitOptions;
19use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
20use http_client::Url;
21use language::{
22 Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
23 LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
24 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
25 tree_sitter_rust, tree_sitter_typescript,
26};
27use lsp::{
28 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
29 WillRenameFiles, notification::DidRenameFiles,
30};
31use parking_lot::Mutex;
32use paths::{config_dir, tasks_file};
33use postage::stream::Stream as _;
34use pretty_assertions::{assert_eq, assert_matches};
35use rand::{Rng as _, rngs::StdRng};
36use serde_json::json;
37#[cfg(not(windows))]
38use std::os;
39use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
40use task::{ResolvedTask, TaskContext};
41use unindent::Unindent as _;
42use util::{
43 TryFutureExt as _, assert_set_eq, maybe, path,
44 paths::PathMatcher,
45 test::{TempTree, marked_text_offsets},
46 uri,
47};
48use worktree::WorktreeModelHandle as _;
49
50#[gpui::test]
51async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
52 cx.executor().allow_parking();
53
54 let (tx, mut rx) = futures::channel::mpsc::unbounded();
55 let _thread = std::thread::spawn(move || {
56 #[cfg(not(target_os = "windows"))]
57 std::fs::metadata("/tmp").unwrap();
58 #[cfg(target_os = "windows")]
59 std::fs::metadata("C:/Windows").unwrap();
60 std::thread::sleep(Duration::from_millis(1000));
61 tx.unbounded_send(1).unwrap();
62 });
63 rx.next().await.unwrap();
64}
65
66#[gpui::test]
67async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
68 cx.executor().allow_parking();
69
70 let io_task = smol::unblock(move || {
71 println!("sleeping on thread {:?}", std::thread::current().id());
72 std::thread::sleep(Duration::from_millis(10));
73 1
74 });
75
76 let task = cx.foreground_executor().spawn(async move {
77 io_task.await;
78 });
79
80 task.await;
81}
82
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    // Verifies that a worktree opened through a symlinked root resolves
    // entries correctly, and that a symlinked subdirectory shares inodes
    // with its target. Uses the real filesystem, hence `allow_parking`.
    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // `root_link` points at the real `root` directory; `finnochio` is a
    // symlink inside the tree pointing at the sibling `fennel` directory.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root path.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // 5 files: apple, banana/carrot/{date,endive}, fennel/grape.
        // (The symlinked `finnochio` directory's contents are not re-counted.)
        assert_eq!(tree.file_count(), 5);
        // The symlinked path and its target resolve to the same inode.
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}
128
129#[gpui::test]
130async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
131 init_test(cx);
132
133 let dir = TempTree::new(json!({
134 ".editorconfig": r#"
135 root = true
136 [*.rs]
137 indent_style = tab
138 indent_size = 3
139 end_of_line = lf
140 insert_final_newline = true
141 trim_trailing_whitespace = true
142 [*.js]
143 tab_width = 10
144 "#,
145 ".zed": {
146 "settings.json": r#"{
147 "tab_size": 8,
148 "hard_tabs": false,
149 "ensure_final_newline_on_save": false,
150 "remove_trailing_whitespace_on_save": false,
151 "soft_wrap": "editor_width"
152 }"#,
153 },
154 "a.rs": "fn a() {\n A\n}",
155 "b": {
156 ".editorconfig": r#"
157 [*.rs]
158 indent_size = 2
159 "#,
160 "b.rs": "fn b() {\n B\n}",
161 },
162 "c.js": "def c\n C\nend",
163 "README.json": "tabs are better\n",
164 }));
165
166 let path = dir.path();
167 let fs = FakeFs::new(cx.executor());
168 fs.insert_tree_from_real_fs(path, path).await;
169 let project = Project::test(fs, [path], cx).await;
170
171 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
172 language_registry.add(js_lang());
173 language_registry.add(json_lang());
174 language_registry.add(rust_lang());
175
176 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
177
178 cx.executor().run_until_parked();
179
180 cx.update(|cx| {
181 let tree = worktree.read(cx);
182 let settings_for = |path: &str| {
183 let file_entry = tree.entry_for_path(path).unwrap().clone();
184 let file = File::for_entry(file_entry, worktree.clone());
185 let file_language = project
186 .read(cx)
187 .languages()
188 .language_for_file_path(file.path.as_ref());
189 let file_language = cx
190 .background_executor()
191 .block(file_language)
192 .expect("Failed to get file language");
193 let file = file as _;
194 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
195 };
196
197 let settings_a = settings_for("a.rs");
198 let settings_b = settings_for("b/b.rs");
199 let settings_c = settings_for("c.js");
200 let settings_readme = settings_for("README.json");
201
202 // .editorconfig overrides .zed/settings
203 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
204 assert_eq!(settings_a.hard_tabs, true);
205 assert_eq!(settings_a.ensure_final_newline_on_save, true);
206 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
207
208 // .editorconfig in b/ overrides .editorconfig in root
209 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
210
211 // "indent_size" is not set, so "tab_width" is used
212 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
213
214 // README.md should not be affected by .editorconfig's globe "*.rs"
215 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
216 });
217}
218
219#[gpui::test]
220async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
221 init_test(cx);
222 cx.update(|cx| {
223 GitHostingProviderRegistry::default_global(cx);
224 git_hosting_providers::init(cx);
225 });
226
227 let fs = FakeFs::new(cx.executor());
228 let str_path = path!("/dir");
229 let path = Path::new(str_path);
230
231 fs.insert_tree(
232 path!("/dir"),
233 json!({
234 ".zed": {
235 "settings.json": r#"{
236 "git_hosting_providers": [
237 {
238 "provider": "gitlab",
239 "base_url": "https://google.com",
240 "name": "foo"
241 }
242 ]
243 }"#
244 },
245 }),
246 )
247 .await;
248
249 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
250 let (_worktree, _) =
251 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
252 cx.executor().run_until_parked();
253
254 cx.update(|cx| {
255 let provider = GitHostingProviderRegistry::global(cx);
256 assert!(
257 provider
258 .list_hosting_providers()
259 .into_iter()
260 .any(|provider| provider.name() == "foo")
261 );
262 });
263
264 fs.atomic_write(
265 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
266 "{}".into(),
267 )
268 .await
269 .unwrap();
270
271 cx.run_until_parked();
272
273 cx.update(|cx| {
274 let provider = GitHostingProviderRegistry::global(cx);
275 assert!(
276 !provider
277 .list_hosting_providers()
278 .into_iter()
279 .any(|provider| provider.name() == "foo")
280 );
281 });
282}
283
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies per-directory `.zed/settings.json` and `.zed/tasks.json`:
    // nested settings override parent settings, worktree tasks are
    // discovered at every `.zed` directory, and global tasks plus
    // recently-scheduled tasks affect task ordering.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n    B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the worktree's (default) task context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Identifies tasks coming from the worktree-root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolution: a/a.rs sees the root settings (tab_size 8),
            // b/b.rs sees the nested override (tab_size 2).
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both `.zed` directories contribute their tasks.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Schedule the root task and add a global task; the scheduled task
    // should now sort first, and the global task should appear last.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Order: recently-scheduled worktree task, other worktree task,
    // then the global task (with its env var resolved).
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
492
493#[gpui::test]
494async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
495 init_test(cx);
496 TaskStore::init(None);
497
498 let fs = FakeFs::new(cx.executor());
499 fs.insert_tree(
500 path!("/dir"),
501 json!({
502 ".zed": {
503 "tasks.json": r#"[{
504 "label": "test worktree root",
505 "command": "echo $ZED_WORKTREE_ROOT"
506 }]"#,
507 },
508 "a": {
509 "a.rs": "fn a() {\n A\n}"
510 },
511 }),
512 )
513 .await;
514
515 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
516 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
517
518 cx.executor().run_until_parked();
519 let worktree_id = cx.update(|cx| {
520 project.update(cx, |project, cx| {
521 project.worktrees(cx).next().unwrap().read(cx).id()
522 })
523 });
524
525 let active_non_worktree_item_tasks = cx
526 .update(|cx| {
527 get_all_tasks(
528 &project,
529 Arc::new(TaskContexts {
530 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
531 active_worktree_context: None,
532 other_worktree_contexts: Vec::new(),
533 lsp_task_sources: HashMap::default(),
534 latest_selection: None,
535 }),
536 cx,
537 )
538 })
539 .await;
540 assert!(
541 active_non_worktree_item_tasks.is_empty(),
542 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
543 );
544
545 let active_worktree_tasks = cx
546 .update(|cx| {
547 get_all_tasks(
548 &project,
549 Arc::new(TaskContexts {
550 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
551 active_worktree_context: Some((worktree_id, {
552 let mut worktree_context = TaskContext::default();
553 worktree_context
554 .task_variables
555 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
556 worktree_context
557 })),
558 other_worktree_contexts: Vec::new(),
559 lsp_task_sources: HashMap::default(),
560 latest_selection: None,
561 }),
562 cx,
563 )
564 })
565 .await;
566 assert_eq!(
567 active_worktree_tasks
568 .into_iter()
569 .map(|(source_kind, task)| {
570 let resolved = task.resolved;
571 (source_kind, resolved.command)
572 })
573 .collect::<Vec<_>>(),
574 vec![(
575 TaskSourceKind::Worktree {
576 id: worktree_id,
577 directory_in_worktree: PathBuf::from(path!(".zed")),
578 id_base: if cfg!(windows) {
579 "local worktree tasks from directory \".zed\"".into()
580 } else {
581 "local worktree tasks from directory \".zed\"".into()
582 },
583 },
584 "echo /dir".to_string(),
585 )]
586 );
587}
588
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end check of language-server lifecycle management:
    // server startup on first matching buffer, capability-based buffer
    // configuration, routing of change/save/close notifications to the
    // right servers, handling of file renames (including renames that
    // change a buffer's language), and server restarts.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake servers: one for Rust, one for JSON, with distinct
    // completion trigger characters so we can tell which capabilities
    // were applied to which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Attach a diagnostic so we can check it gets cleared when the
    // buffer's language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (the two DidOpen notifications may arrive in either order).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
990
991#[gpui::test]
992async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
993 init_test(cx);
994
995 let fs = FakeFs::new(cx.executor());
996 fs.insert_tree(
997 path!("/the-root"),
998 json!({
999 ".gitignore": "target\n",
1000 "Cargo.lock": "",
1001 "src": {
1002 "a.rs": "",
1003 "b.rs": "",
1004 },
1005 "target": {
1006 "x": {
1007 "out": {
1008 "x.rs": ""
1009 }
1010 },
1011 "y": {
1012 "out": {
1013 "y.rs": "",
1014 }
1015 },
1016 "z": {
1017 "out": {
1018 "z.rs": ""
1019 }
1020 }
1021 }
1022 }),
1023 )
1024 .await;
1025 fs.insert_tree(
1026 path!("/the-registry"),
1027 json!({
1028 "dep1": {
1029 "src": {
1030 "dep1.rs": "",
1031 }
1032 },
1033 "dep2": {
1034 "src": {
1035 "dep2.rs": "",
1036 }
1037 },
1038 }),
1039 )
1040 .await;
1041 fs.insert_tree(
1042 path!("/the/stdlib"),
1043 json!({
1044 "LICENSE": "",
1045 "src": {
1046 "string.rs": "",
1047 }
1048 }),
1049 )
1050 .await;
1051
1052 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1053 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1054 (project.languages().clone(), project.lsp_store())
1055 });
1056 language_registry.add(rust_lang());
1057 let mut fake_servers = language_registry.register_fake_lsp(
1058 "Rust",
1059 FakeLspAdapter {
1060 name: "the-language-server",
1061 ..Default::default()
1062 },
1063 );
1064
1065 cx.executor().run_until_parked();
1066
1067 // Start the language server by opening a buffer with a compatible file extension.
1068 project
1069 .update(cx, |project, cx| {
1070 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1071 })
1072 .await
1073 .unwrap();
1074
1075 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1076 project.update(cx, |project, cx| {
1077 let worktree = project.worktrees(cx).next().unwrap();
1078 assert_eq!(
1079 worktree
1080 .read(cx)
1081 .snapshot()
1082 .entries(true, 0)
1083 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1084 .collect::<Vec<_>>(),
1085 &[
1086 (Path::new(""), false),
1087 (Path::new(".gitignore"), false),
1088 (Path::new("Cargo.lock"), false),
1089 (Path::new("src"), false),
1090 (Path::new("src/a.rs"), false),
1091 (Path::new("src/b.rs"), false),
1092 (Path::new("target"), true),
1093 ]
1094 );
1095 });
1096
1097 let prev_read_dir_count = fs.read_dir_call_count();
1098
1099 let fake_server = fake_servers.next().await.unwrap();
1100 let (server_id, server_name) = lsp_store.read_with(cx, |lsp_store, _| {
1101 let (id, status) = lsp_store.language_server_statuses().next().unwrap();
1102 (id, LanguageServerName::from(status.name.as_str()))
1103 });
1104
1105 // Simulate jumping to a definition in a dependency outside of the worktree.
1106 let _out_of_worktree_buffer = project
1107 .update(cx, |project, cx| {
1108 project.open_local_buffer_via_lsp(
1109 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1110 server_id,
1111 server_name.clone(),
1112 cx,
1113 )
1114 })
1115 .await
1116 .unwrap();
1117
1118 // Keep track of the FS events reported to the language server.
1119 let file_changes = Arc::new(Mutex::new(Vec::new()));
1120 fake_server
1121 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1122 registrations: vec![lsp::Registration {
1123 id: Default::default(),
1124 method: "workspace/didChangeWatchedFiles".to_string(),
1125 register_options: serde_json::to_value(
1126 lsp::DidChangeWatchedFilesRegistrationOptions {
1127 watchers: vec![
1128 lsp::FileSystemWatcher {
1129 glob_pattern: lsp::GlobPattern::String(
1130 path!("/the-root/Cargo.toml").to_string(),
1131 ),
1132 kind: None,
1133 },
1134 lsp::FileSystemWatcher {
1135 glob_pattern: lsp::GlobPattern::String(
1136 path!("/the-root/src/*.{rs,c}").to_string(),
1137 ),
1138 kind: None,
1139 },
1140 lsp::FileSystemWatcher {
1141 glob_pattern: lsp::GlobPattern::String(
1142 path!("/the-root/target/y/**/*.rs").to_string(),
1143 ),
1144 kind: None,
1145 },
1146 lsp::FileSystemWatcher {
1147 glob_pattern: lsp::GlobPattern::String(
1148 path!("/the/stdlib/src/**/*.rs").to_string(),
1149 ),
1150 kind: None,
1151 },
1152 lsp::FileSystemWatcher {
1153 glob_pattern: lsp::GlobPattern::String(
1154 path!("**/Cargo.lock").to_string(),
1155 ),
1156 kind: None,
1157 },
1158 ],
1159 },
1160 )
1161 .ok(),
1162 }],
1163 })
1164 .await
1165 .into_response()
1166 .unwrap();
1167 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1168 let file_changes = file_changes.clone();
1169 move |params, _| {
1170 let mut file_changes = file_changes.lock();
1171 file_changes.extend(params.changes);
1172 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1173 }
1174 });
1175
1176 cx.executor().run_until_parked();
1177 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1178 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1179
1180 let mut new_watched_paths = fs.watched_paths();
1181 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1182 assert_eq!(
1183 &new_watched_paths,
1184 &[
1185 Path::new(path!("/the-root")),
1186 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1187 Path::new(path!("/the/stdlib/src"))
1188 ]
1189 );
1190
1191 // Now the language server has asked us to watch an ignored directory path,
1192 // so we recursively load it.
1193 project.update(cx, |project, cx| {
1194 let worktree = project.visible_worktrees(cx).next().unwrap();
1195 assert_eq!(
1196 worktree
1197 .read(cx)
1198 .snapshot()
1199 .entries(true, 0)
1200 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1201 .collect::<Vec<_>>(),
1202 &[
1203 (Path::new(""), false),
1204 (Path::new(".gitignore"), false),
1205 (Path::new("Cargo.lock"), false),
1206 (Path::new("src"), false),
1207 (Path::new("src/a.rs"), false),
1208 (Path::new("src/b.rs"), false),
1209 (Path::new("target"), true),
1210 (Path::new("target/x"), true),
1211 (Path::new("target/y"), true),
1212 (Path::new("target/y/out"), true),
1213 (Path::new("target/y/out/y.rs"), true),
1214 (Path::new("target/z"), true),
1215 ]
1216 );
1217 });
1218
1219 // Perform some file system mutations, two of which match the watched patterns,
1220 // and one of which does not.
1221 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1222 .await
1223 .unwrap();
1224 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1225 .await
1226 .unwrap();
1227 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1228 .await
1229 .unwrap();
1230 fs.create_file(
1231 path!("/the-root/target/x/out/x2.rs").as_ref(),
1232 Default::default(),
1233 )
1234 .await
1235 .unwrap();
1236 fs.create_file(
1237 path!("/the-root/target/y/out/y2.rs").as_ref(),
1238 Default::default(),
1239 )
1240 .await
1241 .unwrap();
1242 fs.save(
1243 path!("/the-root/Cargo.lock").as_ref(),
1244 &"".into(),
1245 Default::default(),
1246 )
1247 .await
1248 .unwrap();
1249 fs.save(
1250 path!("/the-stdlib/LICENSE").as_ref(),
1251 &"".into(),
1252 Default::default(),
1253 )
1254 .await
1255 .unwrap();
1256 fs.save(
1257 path!("/the/stdlib/src/string.rs").as_ref(),
1258 &"".into(),
1259 Default::default(),
1260 )
1261 .await
1262 .unwrap();
1263
1264 // The language server receives events for the FS mutations that match its watch patterns.
1265 cx.executor().run_until_parked();
1266 assert_eq!(
1267 &*file_changes.lock(),
1268 &[
1269 lsp::FileEvent {
1270 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1271 typ: lsp::FileChangeType::CHANGED,
1272 },
1273 lsp::FileEvent {
1274 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1275 typ: lsp::FileChangeType::DELETED,
1276 },
1277 lsp::FileEvent {
1278 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1279 typ: lsp::FileChangeType::CREATED,
1280 },
1281 lsp::FileEvent {
1282 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1283 typ: lsp::FileChangeType::CREATED,
1284 },
1285 lsp::FileEvent {
1286 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1287 typ: lsp::FileChangeType::CHANGED,
1288 },
1289 ]
1290 );
1291}
1292
1293#[gpui::test]
1294async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1295 init_test(cx);
1296
1297 let fs = FakeFs::new(cx.executor());
1298 fs.insert_tree(
1299 path!("/dir"),
1300 json!({
1301 "a.rs": "let a = 1;",
1302 "b.rs": "let b = 2;"
1303 }),
1304 )
1305 .await;
1306
1307 let project = Project::test(
1308 fs,
1309 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1310 cx,
1311 )
1312 .await;
1313 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1314
1315 let buffer_a = project
1316 .update(cx, |project, cx| {
1317 project.open_local_buffer(path!("/dir/a.rs"), cx)
1318 })
1319 .await
1320 .unwrap();
1321 let buffer_b = project
1322 .update(cx, |project, cx| {
1323 project.open_local_buffer(path!("/dir/b.rs"), cx)
1324 })
1325 .await
1326 .unwrap();
1327
1328 lsp_store.update(cx, |lsp_store, cx| {
1329 lsp_store
1330 .update_diagnostics(
1331 LanguageServerId(0),
1332 lsp::PublishDiagnosticsParams {
1333 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1334 version: None,
1335 diagnostics: vec![lsp::Diagnostic {
1336 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1337 severity: Some(lsp::DiagnosticSeverity::ERROR),
1338 message: "error 1".to_string(),
1339 ..Default::default()
1340 }],
1341 },
1342 None,
1343 DiagnosticSourceKind::Pushed,
1344 &[],
1345 cx,
1346 )
1347 .unwrap();
1348 lsp_store
1349 .update_diagnostics(
1350 LanguageServerId(0),
1351 lsp::PublishDiagnosticsParams {
1352 uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
1353 version: None,
1354 diagnostics: vec![lsp::Diagnostic {
1355 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1356 severity: Some(DiagnosticSeverity::WARNING),
1357 message: "error 2".to_string(),
1358 ..Default::default()
1359 }],
1360 },
1361 None,
1362 DiagnosticSourceKind::Pushed,
1363 &[],
1364 cx,
1365 )
1366 .unwrap();
1367 });
1368
1369 buffer_a.update(cx, |buffer, _| {
1370 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1371 assert_eq!(
1372 chunks
1373 .iter()
1374 .map(|(s, d)| (s.as_str(), *d))
1375 .collect::<Vec<_>>(),
1376 &[
1377 ("let ", None),
1378 ("a", Some(DiagnosticSeverity::ERROR)),
1379 (" = 1;", None),
1380 ]
1381 );
1382 });
1383 buffer_b.update(cx, |buffer, _| {
1384 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1385 assert_eq!(
1386 chunks
1387 .iter()
1388 .map(|(s, d)| (s.as_str(), *d))
1389 .collect::<Vec<_>>(),
1390 &[
1391 ("let ", None),
1392 ("b", Some(DiagnosticSeverity::WARNING)),
1393 (" = 2;", None),
1394 ]
1395 );
1396 });
1397}
1398
1399#[gpui::test]
1400async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1401 init_test(cx);
1402
1403 let fs = FakeFs::new(cx.executor());
1404 fs.insert_tree(
1405 path!("/root"),
1406 json!({
1407 "dir": {
1408 ".git": {
1409 "HEAD": "ref: refs/heads/main",
1410 },
1411 ".gitignore": "b.rs",
1412 "a.rs": "let a = 1;",
1413 "b.rs": "let b = 2;",
1414 },
1415 "other.rs": "let b = c;"
1416 }),
1417 )
1418 .await;
1419
1420 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1421 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1422 let (worktree, _) = project
1423 .update(cx, |project, cx| {
1424 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1425 })
1426 .await
1427 .unwrap();
1428 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1429
1430 let (worktree, _) = project
1431 .update(cx, |project, cx| {
1432 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1433 })
1434 .await
1435 .unwrap();
1436 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1437
1438 let server_id = LanguageServerId(0);
1439 lsp_store.update(cx, |lsp_store, cx| {
1440 lsp_store
1441 .update_diagnostics(
1442 server_id,
1443 lsp::PublishDiagnosticsParams {
1444 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1445 version: None,
1446 diagnostics: vec![lsp::Diagnostic {
1447 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1448 severity: Some(lsp::DiagnosticSeverity::ERROR),
1449 message: "unused variable 'b'".to_string(),
1450 ..Default::default()
1451 }],
1452 },
1453 None,
1454 DiagnosticSourceKind::Pushed,
1455 &[],
1456 cx,
1457 )
1458 .unwrap();
1459 lsp_store
1460 .update_diagnostics(
1461 server_id,
1462 lsp::PublishDiagnosticsParams {
1463 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1464 version: None,
1465 diagnostics: vec![lsp::Diagnostic {
1466 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1467 severity: Some(lsp::DiagnosticSeverity::ERROR),
1468 message: "unknown variable 'c'".to_string(),
1469 ..Default::default()
1470 }],
1471 },
1472 None,
1473 DiagnosticSourceKind::Pushed,
1474 &[],
1475 cx,
1476 )
1477 .unwrap();
1478 });
1479
1480 let main_ignored_buffer = project
1481 .update(cx, |project, cx| {
1482 project.open_buffer((main_worktree_id, "b.rs"), cx)
1483 })
1484 .await
1485 .unwrap();
1486 main_ignored_buffer.update(cx, |buffer, _| {
1487 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1488 assert_eq!(
1489 chunks
1490 .iter()
1491 .map(|(s, d)| (s.as_str(), *d))
1492 .collect::<Vec<_>>(),
1493 &[
1494 ("let ", None),
1495 ("b", Some(DiagnosticSeverity::ERROR)),
1496 (" = 2;", None),
1497 ],
1498 "Gigitnored buffers should still get in-buffer diagnostics",
1499 );
1500 });
1501 let other_buffer = project
1502 .update(cx, |project, cx| {
1503 project.open_buffer((other_worktree_id, ""), cx)
1504 })
1505 .await
1506 .unwrap();
1507 other_buffer.update(cx, |buffer, _| {
1508 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1509 assert_eq!(
1510 chunks
1511 .iter()
1512 .map(|(s, d)| (s.as_str(), *d))
1513 .collect::<Vec<_>>(),
1514 &[
1515 ("let b = ", None),
1516 ("c", Some(DiagnosticSeverity::ERROR)),
1517 (";", None),
1518 ],
1519 "Buffers from hidden projects should still get in-buffer diagnostics"
1520 );
1521 });
1522
1523 project.update(cx, |project, cx| {
1524 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1525 assert_eq!(
1526 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1527 vec![(
1528 ProjectPath {
1529 worktree_id: main_worktree_id,
1530 path: Arc::from(Path::new("b.rs")),
1531 },
1532 server_id,
1533 DiagnosticSummary {
1534 error_count: 1,
1535 warning_count: 0,
1536 }
1537 )]
1538 );
1539 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1540 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1541 });
1542}
1543
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies the exact sequence of project events emitted while a language
    // server runs disk-based diagnostics under its progress token:
    // LanguageServerAdded -> RefreshInlayHints -> DiskBasedDiagnosticsStarted
    // -> DiagnosticsUpdated -> DiskBasedDiagnosticsFinished; and that
    // publishing the same empty diagnostics twice yields only one
    // DiagnosticsUpdated event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register a fake Rust server whose progress token marks disk-based
    // diagnostic runs.
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Subscribe to project events before driving the server so none are missed.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress under the disk-based token signals that diagnostics
    // are being recomputed on disk.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // A diagnostic published mid-progress surfaces as DiagnosticsUpdated for
    // the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress closes out the disk-based diagnostics run.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // The published diagnostic is reflected in the buffer's own diagnostic set.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical (empty) publication must not emit another event:
    // after settling, the event stream is still pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1680
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restarting a server while its disk-based diagnostics run is still in
    // progress must not leave the project stuck in the "in progress" state:
    // only the new instance's progress is tracked, and finishing it empties
    // the running set even though the old instance never ended its own token.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The replacement instance gets a fresh server id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new instance is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1767
1768#[gpui::test]
1769async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1770 init_test(cx);
1771
1772 let fs = FakeFs::new(cx.executor());
1773 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
1774
1775 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1776
1777 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1778 language_registry.add(rust_lang());
1779 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1780
1781 let (buffer, _) = project
1782 .update(cx, |project, cx| {
1783 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1784 })
1785 .await
1786 .unwrap();
1787
1788 // Publish diagnostics
1789 let fake_server = fake_servers.next().await.unwrap();
1790 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1791 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1792 version: None,
1793 diagnostics: vec![lsp::Diagnostic {
1794 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1795 severity: Some(lsp::DiagnosticSeverity::ERROR),
1796 message: "the message".to_string(),
1797 ..Default::default()
1798 }],
1799 });
1800
1801 cx.executor().run_until_parked();
1802 buffer.update(cx, |buffer, _| {
1803 assert_eq!(
1804 buffer
1805 .snapshot()
1806 .diagnostics_in_range::<_, usize>(0..1, false)
1807 .map(|entry| entry.diagnostic.message.clone())
1808 .collect::<Vec<_>>(),
1809 ["the message".to_string()]
1810 );
1811 });
1812 project.update(cx, |project, cx| {
1813 assert_eq!(
1814 project.diagnostic_summary(false, cx),
1815 DiagnosticSummary {
1816 error_count: 1,
1817 warning_count: 0,
1818 }
1819 );
1820 });
1821
1822 project.update(cx, |project, cx| {
1823 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1824 });
1825
1826 // The diagnostics are cleared.
1827 cx.executor().run_until_parked();
1828 buffer.update(cx, |buffer, _| {
1829 assert_eq!(
1830 buffer
1831 .snapshot()
1832 .diagnostics_in_range::<_, usize>(0..1, false)
1833 .map(|entry| entry.diagnostic.message.clone())
1834 .collect::<Vec<_>>(),
1835 Vec::<String>::new(),
1836 );
1837 });
1838 project.update(cx, |project, cx| {
1839 assert_eq!(
1840 project.diagnostic_summary(false, cx),
1841 DiagnosticSummary {
1842 error_count: 0,
1843 warning_count: 0,
1844 }
1845 );
1846 });
1847}
1848
1849#[gpui::test]
1850async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1851 init_test(cx);
1852
1853 let fs = FakeFs::new(cx.executor());
1854 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1855
1856 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1857 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1858
1859 language_registry.add(rust_lang());
1860 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1861
1862 let (buffer, _handle) = project
1863 .update(cx, |project, cx| {
1864 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1865 })
1866 .await
1867 .unwrap();
1868
1869 // Before restarting the server, report diagnostics with an unknown buffer version.
1870 let fake_server = fake_servers.next().await.unwrap();
1871 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1872 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1873 version: Some(10000),
1874 diagnostics: Vec::new(),
1875 });
1876 cx.executor().run_until_parked();
1877 project.update(cx, |project, cx| {
1878 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1879 });
1880
1881 let mut fake_server = fake_servers.next().await.unwrap();
1882 let notification = fake_server
1883 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1884 .await
1885 .text_document;
1886 assert_eq!(notification.version, 0);
1887}
1888
1889#[gpui::test]
1890async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1891 init_test(cx);
1892
1893 let progress_token = "the-progress-token";
1894
1895 let fs = FakeFs::new(cx.executor());
1896 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1897
1898 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1899
1900 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1901 language_registry.add(rust_lang());
1902 let mut fake_servers = language_registry.register_fake_lsp(
1903 "Rust",
1904 FakeLspAdapter {
1905 name: "the-language-server",
1906 disk_based_diagnostics_sources: vec!["disk".into()],
1907 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1908 ..Default::default()
1909 },
1910 );
1911
1912 let (buffer, _handle) = project
1913 .update(cx, |project, cx| {
1914 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1915 })
1916 .await
1917 .unwrap();
1918
1919 // Simulate diagnostics starting to update.
1920 let mut fake_server = fake_servers.next().await.unwrap();
1921 fake_server
1922 .start_progress_with(
1923 "another-token",
1924 lsp::WorkDoneProgressBegin {
1925 cancellable: Some(false),
1926 ..Default::default()
1927 },
1928 )
1929 .await;
1930 fake_server
1931 .start_progress_with(
1932 progress_token,
1933 lsp::WorkDoneProgressBegin {
1934 cancellable: Some(true),
1935 ..Default::default()
1936 },
1937 )
1938 .await;
1939 cx.executor().run_until_parked();
1940
1941 project.update(cx, |project, cx| {
1942 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1943 });
1944
1945 let cancel_notification = fake_server
1946 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1947 .await;
1948 assert_eq!(
1949 cancel_notification.token,
1950 NumberOrString::String(progress_token.into())
1951 );
1952}
1953
1954#[gpui::test]
1955async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1956 init_test(cx);
1957
1958 let fs = FakeFs::new(cx.executor());
1959 fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
1960 .await;
1961
1962 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1963 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1964
1965 let mut fake_rust_servers = language_registry.register_fake_lsp(
1966 "Rust",
1967 FakeLspAdapter {
1968 name: "rust-lsp",
1969 ..Default::default()
1970 },
1971 );
1972 let mut fake_js_servers = language_registry.register_fake_lsp(
1973 "JavaScript",
1974 FakeLspAdapter {
1975 name: "js-lsp",
1976 ..Default::default()
1977 },
1978 );
1979 language_registry.add(rust_lang());
1980 language_registry.add(js_lang());
1981
1982 let _rs_buffer = project
1983 .update(cx, |project, cx| {
1984 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1985 })
1986 .await
1987 .unwrap();
1988 let _js_buffer = project
1989 .update(cx, |project, cx| {
1990 project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
1991 })
1992 .await
1993 .unwrap();
1994
1995 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1996 assert_eq!(
1997 fake_rust_server_1
1998 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1999 .await
2000 .text_document
2001 .uri
2002 .as_str(),
2003 uri!("file:///dir/a.rs")
2004 );
2005
2006 let mut fake_js_server = fake_js_servers.next().await.unwrap();
2007 assert_eq!(
2008 fake_js_server
2009 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2010 .await
2011 .text_document
2012 .uri
2013 .as_str(),
2014 uri!("file:///dir/b.js")
2015 );
2016
2017 // Disable Rust language server, ensuring only that server gets stopped.
2018 cx.update(|cx| {
2019 SettingsStore::update_global(cx, |settings, cx| {
2020 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
2021 settings.languages.insert(
2022 "Rust".into(),
2023 LanguageSettingsContent {
2024 enable_language_server: Some(false),
2025 ..Default::default()
2026 },
2027 );
2028 });
2029 })
2030 });
2031 fake_rust_server_1
2032 .receive_notification::<lsp::notification::Exit>()
2033 .await;
2034
2035 // Enable Rust and disable JavaScript language servers, ensuring that the
2036 // former gets started again and that the latter stops.
2037 cx.update(|cx| {
2038 SettingsStore::update_global(cx, |settings, cx| {
2039 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
2040 settings.languages.insert(
2041 LanguageName::new("Rust"),
2042 LanguageSettingsContent {
2043 enable_language_server: Some(true),
2044 ..Default::default()
2045 },
2046 );
2047 settings.languages.insert(
2048 LanguageName::new("JavaScript"),
2049 LanguageSettingsContent {
2050 enable_language_server: Some(false),
2051 ..Default::default()
2052 },
2053 );
2054 });
2055 })
2056 });
2057 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
2058 assert_eq!(
2059 fake_rust_server_2
2060 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2061 .await
2062 .text_document
2063 .uri
2064 .as_str(),
2065 uri!("file:///dir/a.rs")
2066 );
2067 fake_js_server
2068 .receive_notification::<lsp::notification::Exit>()
2069 .await;
2070}
2071
2072#[gpui::test(iterations = 3)]
2073async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
2074 init_test(cx);
2075
2076 let text = "
2077 fn a() { A }
2078 fn b() { BB }
2079 fn c() { CCC }
2080 "
2081 .unindent();
2082
2083 let fs = FakeFs::new(cx.executor());
2084 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2085
2086 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2087 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2088
2089 language_registry.add(rust_lang());
2090 let mut fake_servers = language_registry.register_fake_lsp(
2091 "Rust",
2092 FakeLspAdapter {
2093 disk_based_diagnostics_sources: vec!["disk".into()],
2094 ..Default::default()
2095 },
2096 );
2097
2098 let buffer = project
2099 .update(cx, |project, cx| {
2100 project.open_local_buffer(path!("/dir/a.rs"), cx)
2101 })
2102 .await
2103 .unwrap();
2104
2105 let _handle = project.update(cx, |project, cx| {
2106 project.register_buffer_with_language_servers(&buffer, cx)
2107 });
2108
2109 let mut fake_server = fake_servers.next().await.unwrap();
2110 let open_notification = fake_server
2111 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2112 .await;
2113
2114 // Edit the buffer, moving the content down
2115 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2116 let change_notification_1 = fake_server
2117 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2118 .await;
2119 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2120
2121 // Report some diagnostics for the initial version of the buffer
2122 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2123 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2124 version: Some(open_notification.text_document.version),
2125 diagnostics: vec![
2126 lsp::Diagnostic {
2127 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2128 severity: Some(DiagnosticSeverity::ERROR),
2129 message: "undefined variable 'A'".to_string(),
2130 source: Some("disk".to_string()),
2131 ..Default::default()
2132 },
2133 lsp::Diagnostic {
2134 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2135 severity: Some(DiagnosticSeverity::ERROR),
2136 message: "undefined variable 'BB'".to_string(),
2137 source: Some("disk".to_string()),
2138 ..Default::default()
2139 },
2140 lsp::Diagnostic {
2141 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2142 severity: Some(DiagnosticSeverity::ERROR),
2143 source: Some("disk".to_string()),
2144 message: "undefined variable 'CCC'".to_string(),
2145 ..Default::default()
2146 },
2147 ],
2148 });
2149
2150 // The diagnostics have moved down since they were created.
2151 cx.executor().run_until_parked();
2152 buffer.update(cx, |buffer, _| {
2153 assert_eq!(
2154 buffer
2155 .snapshot()
2156 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2157 .collect::<Vec<_>>(),
2158 &[
2159 DiagnosticEntry {
2160 range: Point::new(3, 9)..Point::new(3, 11),
2161 diagnostic: Diagnostic {
2162 source: Some("disk".into()),
2163 severity: DiagnosticSeverity::ERROR,
2164 message: "undefined variable 'BB'".to_string(),
2165 is_disk_based: true,
2166 group_id: 1,
2167 is_primary: true,
2168 source_kind: DiagnosticSourceKind::Pushed,
2169 ..Diagnostic::default()
2170 },
2171 },
2172 DiagnosticEntry {
2173 range: Point::new(4, 9)..Point::new(4, 12),
2174 diagnostic: Diagnostic {
2175 source: Some("disk".into()),
2176 severity: DiagnosticSeverity::ERROR,
2177 message: "undefined variable 'CCC'".to_string(),
2178 is_disk_based: true,
2179 group_id: 2,
2180 is_primary: true,
2181 source_kind: DiagnosticSourceKind::Pushed,
2182 ..Diagnostic::default()
2183 }
2184 }
2185 ]
2186 );
2187 assert_eq!(
2188 chunks_with_diagnostics(buffer, 0..buffer.len()),
2189 [
2190 ("\n\nfn a() { ".to_string(), None),
2191 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2192 (" }\nfn b() { ".to_string(), None),
2193 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2194 (" }\nfn c() { ".to_string(), None),
2195 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2196 (" }\n".to_string(), None),
2197 ]
2198 );
2199 assert_eq!(
2200 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2201 [
2202 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2203 (" }\nfn c() { ".to_string(), None),
2204 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2205 ]
2206 );
2207 });
2208
2209 // Ensure overlapping diagnostics are highlighted correctly.
2210 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2211 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2212 version: Some(open_notification.text_document.version),
2213 diagnostics: vec![
2214 lsp::Diagnostic {
2215 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2216 severity: Some(DiagnosticSeverity::ERROR),
2217 message: "undefined variable 'A'".to_string(),
2218 source: Some("disk".to_string()),
2219 ..Default::default()
2220 },
2221 lsp::Diagnostic {
2222 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2223 severity: Some(DiagnosticSeverity::WARNING),
2224 message: "unreachable statement".to_string(),
2225 source: Some("disk".to_string()),
2226 ..Default::default()
2227 },
2228 ],
2229 });
2230
2231 cx.executor().run_until_parked();
2232 buffer.update(cx, |buffer, _| {
2233 assert_eq!(
2234 buffer
2235 .snapshot()
2236 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2237 .collect::<Vec<_>>(),
2238 &[
2239 DiagnosticEntry {
2240 range: Point::new(2, 9)..Point::new(2, 12),
2241 diagnostic: Diagnostic {
2242 source: Some("disk".into()),
2243 severity: DiagnosticSeverity::WARNING,
2244 message: "unreachable statement".to_string(),
2245 is_disk_based: true,
2246 group_id: 4,
2247 is_primary: true,
2248 source_kind: DiagnosticSourceKind::Pushed,
2249 ..Diagnostic::default()
2250 }
2251 },
2252 DiagnosticEntry {
2253 range: Point::new(2, 9)..Point::new(2, 10),
2254 diagnostic: Diagnostic {
2255 source: Some("disk".into()),
2256 severity: DiagnosticSeverity::ERROR,
2257 message: "undefined variable 'A'".to_string(),
2258 is_disk_based: true,
2259 group_id: 3,
2260 is_primary: true,
2261 source_kind: DiagnosticSourceKind::Pushed,
2262 ..Diagnostic::default()
2263 },
2264 }
2265 ]
2266 );
2267 assert_eq!(
2268 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2269 [
2270 ("fn a() { ".to_string(), None),
2271 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2272 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2273 ("\n".to_string(), None),
2274 ]
2275 );
2276 assert_eq!(
2277 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2278 [
2279 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2280 ("\n".to_string(), None),
2281 ]
2282 );
2283 });
2284
2285 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2286 // changes since the last save.
2287 buffer.update(cx, |buffer, cx| {
2288 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2289 buffer.edit(
2290 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2291 None,
2292 cx,
2293 );
2294 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2295 });
2296 let change_notification_2 = fake_server
2297 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2298 .await;
2299 assert!(
2300 change_notification_2.text_document.version > change_notification_1.text_document.version
2301 );
2302
2303 // Handle out-of-order diagnostics
2304 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2305 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2306 version: Some(change_notification_2.text_document.version),
2307 diagnostics: vec![
2308 lsp::Diagnostic {
2309 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2310 severity: Some(DiagnosticSeverity::ERROR),
2311 message: "undefined variable 'BB'".to_string(),
2312 source: Some("disk".to_string()),
2313 ..Default::default()
2314 },
2315 lsp::Diagnostic {
2316 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2317 severity: Some(DiagnosticSeverity::WARNING),
2318 message: "undefined variable 'A'".to_string(),
2319 source: Some("disk".to_string()),
2320 ..Default::default()
2321 },
2322 ],
2323 });
2324
2325 cx.executor().run_until_parked();
2326 buffer.update(cx, |buffer, _| {
2327 assert_eq!(
2328 buffer
2329 .snapshot()
2330 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2331 .collect::<Vec<_>>(),
2332 &[
2333 DiagnosticEntry {
2334 range: Point::new(2, 21)..Point::new(2, 22),
2335 diagnostic: Diagnostic {
2336 source: Some("disk".into()),
2337 severity: DiagnosticSeverity::WARNING,
2338 message: "undefined variable 'A'".to_string(),
2339 is_disk_based: true,
2340 group_id: 6,
2341 is_primary: true,
2342 source_kind: DiagnosticSourceKind::Pushed,
2343 ..Diagnostic::default()
2344 }
2345 },
2346 DiagnosticEntry {
2347 range: Point::new(3, 9)..Point::new(3, 14),
2348 diagnostic: Diagnostic {
2349 source: Some("disk".into()),
2350 severity: DiagnosticSeverity::ERROR,
2351 message: "undefined variable 'BB'".to_string(),
2352 is_disk_based: true,
2353 group_id: 5,
2354 is_primary: true,
2355 source_kind: DiagnosticSourceKind::Pushed,
2356 ..Diagnostic::default()
2357 },
2358 }
2359 ]
2360 );
2361 });
2362}
2363
// Verifies how zero-width (empty) diagnostic ranges are rendered as highlighted
// chunks: an empty range is widened to cover an adjacent character so it is
// visible in the buffer.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    None,
                    vec![
                        // Zero-width range in the middle of line 0 (just before `;`).
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        // Zero-width range at the end of line 1 (after the trailing space).
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2439
// Diagnostics published by different language servers for the same file are
// tracked independently: one error from each of two servers over the same range
// yields a summary with two errors, not one.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Server 0 reports an error over the first word...
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // ...and server 1 reports its own error over the very same range.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' diagnostics are counted; the second publish did not
        // replace the first server's entries.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2500
// Language servers may compute edits against an older document version. This
// test edits the buffer after recording the version the server "saw", then
// checks that `edits_from_lsp` transforms those stale edits through the
// intervening buffer changes so they land in the right places.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    // The document version the server's edits will be expressed against.
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // These LSP edits use coordinates from `lsp_document_version`, which
    // predates the buffer edits above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        // The stale edits were remapped correctly: the comments inserted after
        // the snapshot survive, and the server's changes apply around them.
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2655
// A small logical change expressed as a very large LSP diff (as rust-analyzer
// does for merge-imports) should be minimized by `edits_from_lsp` into a small
// set of buffer edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four overlapping LSP edits collapse into just two buffer edits:
        // rewrite the first import, and delete the now-redundant second import.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2766
// The LSP spec requires insertions at a position to precede a replacement
// starting there; some servers violate this. `edits_from_lsp` should still
// apply both edits sensibly rather than dropping or misordering them.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replacement covering "Path" at the start of the file...
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    // ...followed (out of spec order) by an insertion at position 0.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        // The import is inserted before the (unchanged) "Path()" call.
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
2822
// Servers sometimes send unordered edits with inverted ranges or positions past
// the end of the document. `edits_from_lsp` should normalize all of that and
// still produce a minimal, correctly-ordered set of buffer edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) comes before start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same minimal result as the well-formed variant of this diff.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2929
2930fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2931 buffer: &Buffer,
2932 range: Range<T>,
2933) -> Vec<(String, Option<DiagnosticSeverity>)> {
2934 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2935 for chunk in buffer.snapshot().chunks(range, true) {
2936 if chunks.last().map_or(false, |prev_chunk| {
2937 prev_chunk.1 == chunk.diagnostic_severity
2938 }) {
2939 chunks.last_mut().unwrap().0.push_str(chunk.text);
2940 } else {
2941 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2942 }
2943 }
2944 chunks
2945}
2946
// Go-to-definition targeting a file outside the opened worktree should open
// that file in a new *invisible* worktree, and that worktree should be released
// once the definition (which retains the target buffer) is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs stays outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // The fake server resolves the definition to a location inside a.rs.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an invisible worktree to host the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the target buffer, and with it the
    // invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3044
// A completion item's `text_edit`, when present, determines both the inserted
// text and the replaced range — overriding `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item carries all three fields; only `text_edit` should matter.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // Both new text and replace range come from the `text_edit`.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3127
// When a completion item has no `text_edit`, the list-level default
// `item_defaults.edit_range` supplies the replace range, and the inserted text
// falls back to `insert_text`, then to `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `insert_text` is used, with the default edit_range as replace range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With neither text_edit nor insert_text, the `label` is inserted.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3263
// When neither the item nor the list defaults specify an edit range, the
// replace range is inferred from the buffer contents around the cursor (here,
// the word preceding the completion position).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers "fqn", the 3-character word before the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp", the word before the cursor (which sits
    // just inside the closing quote).
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3369
// Completion text containing `\r` or `\r\n` line endings is normalized to `\n`
// before being offered as the completion's new text.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server responds with insert_text mixing bare `\r` and `\r\n`.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both `\r` and `\r\n` were normalized to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3437
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // Covers the command-based code-action flow: the server returns an action
    // with no edits; resolving it yields only a command; executing that
    // command makes the server send a `workspace/applyEdit` request back; and
    // those edits must appear in the transaction returned by
    // `apply_code_action`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action — the one carrying `data` for resolution.
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // The edit inserts "X" at the start of the file.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3579
3580#[gpui::test(iterations = 10)]
3581async fn test_save_file(cx: &mut gpui::TestAppContext) {
3582 init_test(cx);
3583
3584 let fs = FakeFs::new(cx.executor());
3585 fs.insert_tree(
3586 path!("/dir"),
3587 json!({
3588 "file1": "the old contents",
3589 }),
3590 )
3591 .await;
3592
3593 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3594 let buffer = project
3595 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3596 .await
3597 .unwrap();
3598 buffer.update(cx, |buffer, cx| {
3599 assert_eq!(buffer.text(), "the old contents");
3600 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3601 });
3602
3603 project
3604 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3605 .await
3606 .unwrap();
3607
3608 let new_text = fs
3609 .load(Path::new(path!("/dir/file1")))
3610 .await
3611 .unwrap()
3612 .replace("\r\n", "\n");
3613 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3614}
3615
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    // Saving a brand-new (untitled) buffer under a name with a recognized
    // language extension should spawn that language's server and notify it
    // about the newly-saved file.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |this, cx| this.create_buffer(cx))
        .unwrap()
        .await;
    // Before the buffer has a file, no language server is associated with it.
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: Arc::from("file.rs".as_ref()),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer is associated with the new Rust server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
3695
3696#[gpui::test(iterations = 30)]
3697async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3698 init_test(cx);
3699
3700 let fs = FakeFs::new(cx.executor().clone());
3701 fs.insert_tree(
3702 path!("/dir"),
3703 json!({
3704 "file1": "the original contents",
3705 }),
3706 )
3707 .await;
3708
3709 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3710 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3711 let buffer = project
3712 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3713 .await
3714 .unwrap();
3715
3716 // Simulate buffer diffs being slow, so that they don't complete before
3717 // the next file change occurs.
3718 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3719
3720 // Change the buffer's file on disk, and then wait for the file change
3721 // to be detected by the worktree, so that the buffer starts reloading.
3722 fs.save(
3723 path!("/dir/file1").as_ref(),
3724 &"the first contents".into(),
3725 Default::default(),
3726 )
3727 .await
3728 .unwrap();
3729 worktree.next_event(cx).await;
3730
3731 // Change the buffer's file again. Depending on the random seed, the
3732 // previous file change may still be in progress.
3733 fs.save(
3734 path!("/dir/file1").as_ref(),
3735 &"the second contents".into(),
3736 Default::default(),
3737 )
3738 .await
3739 .unwrap();
3740 worktree.next_event(cx).await;
3741
3742 cx.executor().run_until_parked();
3743 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3744 buffer.read_with(cx, |buffer, _| {
3745 assert_eq!(buffer.text(), on_disk_text);
3746 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3747 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3748 });
3749}
3750
3751#[gpui::test(iterations = 30)]
3752async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3753 init_test(cx);
3754
3755 let fs = FakeFs::new(cx.executor().clone());
3756 fs.insert_tree(
3757 path!("/dir"),
3758 json!({
3759 "file1": "the original contents",
3760 }),
3761 )
3762 .await;
3763
3764 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3765 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3766 let buffer = project
3767 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3768 .await
3769 .unwrap();
3770
3771 // Simulate buffer diffs being slow, so that they don't complete before
3772 // the next file change occurs.
3773 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3774
3775 // Change the buffer's file on disk, and then wait for the file change
3776 // to be detected by the worktree, so that the buffer starts reloading.
3777 fs.save(
3778 path!("/dir/file1").as_ref(),
3779 &"the first contents".into(),
3780 Default::default(),
3781 )
3782 .await
3783 .unwrap();
3784 worktree.next_event(cx).await;
3785
3786 cx.executor()
3787 .spawn(cx.executor().simulate_random_delay())
3788 .await;
3789
3790 // Perform a noop edit, causing the buffer's version to increase.
3791 buffer.update(cx, |buffer, cx| {
3792 buffer.edit([(0..0, " ")], None, cx);
3793 buffer.undo(cx);
3794 });
3795
3796 cx.executor().run_until_parked();
3797 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3798 buffer.read_with(cx, |buffer, _| {
3799 let buffer_text = buffer.text();
3800 if buffer_text == on_disk_text {
3801 assert!(
3802 !buffer.is_dirty() && !buffer.has_conflict(),
3803 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3804 );
3805 }
3806 // If the file change occurred while the buffer was processing the first
3807 // change, the buffer will be in a conflicting state.
3808 else {
3809 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3810 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3811 }
3812 });
3813}
3814
3815#[gpui::test]
3816async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3817 init_test(cx);
3818
3819 let fs = FakeFs::new(cx.executor());
3820 fs.insert_tree(
3821 path!("/dir"),
3822 json!({
3823 "file1": "the old contents",
3824 }),
3825 )
3826 .await;
3827
3828 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3829 let buffer = project
3830 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3831 .await
3832 .unwrap();
3833 buffer.update(cx, |buffer, cx| {
3834 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3835 });
3836
3837 project
3838 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3839 .await
3840 .unwrap();
3841
3842 let new_text = fs
3843 .load(Path::new(path!("/dir/file1")))
3844 .await
3845 .unwrap()
3846 .replace("\r\n", "\n");
3847 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3848}
3849
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    // Saving an untitled buffer to a path should write the file, clear the
    // dirty state, re-run language detection for the new file name, and
    // register the buffer so a subsequent open of that path dedupes to it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // Untitled buffers start out as plain text.
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The language is re-detected from the `.rs` extension after saving.
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the just-saved path returns the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3901
3902#[gpui::test(retries = 5)]
3903async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
3904 use worktree::WorktreeModelHandle as _;
3905
3906 init_test(cx);
3907 cx.executor().allow_parking();
3908
3909 let dir = TempTree::new(json!({
3910 "a": {
3911 "file1": "",
3912 "file2": "",
3913 "file3": "",
3914 },
3915 "b": {
3916 "c": {
3917 "file4": "",
3918 "file5": "",
3919 }
3920 }
3921 }));
3922
3923 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
3924
3925 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3926 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
3927 async move { buffer.await.unwrap() }
3928 };
3929 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3930 project.update(cx, |project, cx| {
3931 let tree = project.worktrees(cx).next().unwrap();
3932 tree.read(cx)
3933 .entry_for_path(path)
3934 .unwrap_or_else(|| panic!("no entry for path {}", path))
3935 .id
3936 })
3937 };
3938
3939 let buffer2 = buffer_for_path("a/file2", cx).await;
3940 let buffer3 = buffer_for_path("a/file3", cx).await;
3941 let buffer4 = buffer_for_path("b/c/file4", cx).await;
3942 let buffer5 = buffer_for_path("b/c/file5", cx).await;
3943
3944 let file2_id = id_for_path("a/file2", cx);
3945 let file3_id = id_for_path("a/file3", cx);
3946 let file4_id = id_for_path("b/c/file4", cx);
3947
3948 // Create a remote copy of this worktree.
3949 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3950 let metadata = tree.update(cx, |tree, _| tree.metadata_proto());
3951
3952 let updates = Arc::new(Mutex::new(Vec::new()));
3953 tree.update(cx, |tree, cx| {
3954 let updates = updates.clone();
3955 tree.observe_updates(0, cx, move |update| {
3956 updates.lock().push(update);
3957 async { true }
3958 });
3959 });
3960
3961 let remote =
3962 cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));
3963
3964 cx.executor().run_until_parked();
3965
3966 cx.update(|cx| {
3967 assert!(!buffer2.read(cx).is_dirty());
3968 assert!(!buffer3.read(cx).is_dirty());
3969 assert!(!buffer4.read(cx).is_dirty());
3970 assert!(!buffer5.read(cx).is_dirty());
3971 });
3972
3973 // Rename and delete files and directories.
3974 tree.flush_fs_events(cx).await;
3975 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3976 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3977 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3978 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3979 tree.flush_fs_events(cx).await;
3980
3981 cx.update(|app| {
3982 assert_eq!(
3983 tree.read(app)
3984 .paths()
3985 .map(|p| p.to_str().unwrap())
3986 .collect::<Vec<_>>(),
3987 vec![
3988 "a",
3989 path!("a/file1"),
3990 path!("a/file2.new"),
3991 "b",
3992 "d",
3993 path!("d/file3"),
3994 path!("d/file4"),
3995 ]
3996 );
3997 });
3998
3999 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
4000 assert_eq!(id_for_path("d/file3", cx), file3_id);
4001 assert_eq!(id_for_path("d/file4", cx), file4_id);
4002
4003 cx.update(|cx| {
4004 assert_eq!(
4005 buffer2.read(cx).file().unwrap().path().as_ref(),
4006 Path::new("a/file2.new")
4007 );
4008 assert_eq!(
4009 buffer3.read(cx).file().unwrap().path().as_ref(),
4010 Path::new("d/file3")
4011 );
4012 assert_eq!(
4013 buffer4.read(cx).file().unwrap().path().as_ref(),
4014 Path::new("d/file4")
4015 );
4016 assert_eq!(
4017 buffer5.read(cx).file().unwrap().path().as_ref(),
4018 Path::new("b/c/file5")
4019 );
4020
4021 assert_matches!(
4022 buffer2.read(cx).file().unwrap().disk_state(),
4023 DiskState::Present { .. }
4024 );
4025 assert_matches!(
4026 buffer3.read(cx).file().unwrap().disk_state(),
4027 DiskState::Present { .. }
4028 );
4029 assert_matches!(
4030 buffer4.read(cx).file().unwrap().disk_state(),
4031 DiskState::Present { .. }
4032 );
4033 assert_eq!(
4034 buffer5.read(cx).file().unwrap().disk_state(),
4035 DiskState::Deleted
4036 );
4037 });
4038
4039 // Update the remote worktree. Check that it becomes consistent with the
4040 // local worktree.
4041 cx.executor().run_until_parked();
4042
4043 remote.update(cx, |remote, _| {
4044 for update in updates.lock().drain(..) {
4045 remote.as_remote_mut().unwrap().update_from_remote(update);
4046 }
4047 });
4048 cx.executor().run_until_parked();
4049 remote.update(cx, |remote, _| {
4050 assert_eq!(
4051 remote
4052 .paths()
4053 .map(|p| p.to_str().unwrap())
4054 .collect::<Vec<_>>(),
4055 vec![
4056 "a",
4057 path!("a/file1"),
4058 path!("a/file2.new"),
4059 "b",
4060 "d",
4061 path!("d/file3"),
4062 path!("d/file4"),
4063 ]
4064 );
4065 });
4066}
4067
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    // Renaming a directory must preserve the worktree entry ids of both the
    // directory and the files within it, and must not dirty open buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename directory `a` to `b`.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids survive the rename, and the open buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
4119
4120#[gpui::test]
4121async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4122 init_test(cx);
4123
4124 let fs = FakeFs::new(cx.executor());
4125 fs.insert_tree(
4126 "/dir",
4127 json!({
4128 "a.txt": "a-contents",
4129 "b.txt": "b-contents",
4130 }),
4131 )
4132 .await;
4133
4134 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4135
4136 // Spawn multiple tasks to open paths, repeating some paths.
4137 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4138 (
4139 p.open_local_buffer("/dir/a.txt", cx),
4140 p.open_local_buffer("/dir/b.txt", cx),
4141 p.open_local_buffer("/dir/a.txt", cx),
4142 )
4143 });
4144
4145 let buffer_a_1 = buffer_a_1.await.unwrap();
4146 let buffer_a_2 = buffer_a_2.await.unwrap();
4147 let buffer_b = buffer_b.await.unwrap();
4148 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4149 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4150
4151 // There is only one buffer per path.
4152 let buffer_a_id = buffer_a_1.entity_id();
4153 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4154
4155 // Open the same path again while it is still open.
4156 drop(buffer_a_1);
4157 let buffer_a_3 = project
4158 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4159 .await
4160 .unwrap();
4161
4162 // There's still only one buffer per path.
4163 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4164}
4165
4166#[gpui::test]
4167async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4168 init_test(cx);
4169
4170 let fs = FakeFs::new(cx.executor());
4171 fs.insert_tree(
4172 path!("/dir"),
4173 json!({
4174 "file1": "abc",
4175 "file2": "def",
4176 "file3": "ghi",
4177 }),
4178 )
4179 .await;
4180
4181 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4182
4183 let buffer1 = project
4184 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4185 .await
4186 .unwrap();
4187 let events = Arc::new(Mutex::new(Vec::new()));
4188
4189 // initially, the buffer isn't dirty.
4190 buffer1.update(cx, |buffer, cx| {
4191 cx.subscribe(&buffer1, {
4192 let events = events.clone();
4193 move |_, _, event, _| match event {
4194 BufferEvent::Operation { .. } => {}
4195 _ => events.lock().push(event.clone()),
4196 }
4197 })
4198 .detach();
4199
4200 assert!(!buffer.is_dirty());
4201 assert!(events.lock().is_empty());
4202
4203 buffer.edit([(1..2, "")], None, cx);
4204 });
4205
4206 // after the first edit, the buffer is dirty, and emits a dirtied event.
4207 buffer1.update(cx, |buffer, cx| {
4208 assert!(buffer.text() == "ac");
4209 assert!(buffer.is_dirty());
4210 assert_eq!(
4211 *events.lock(),
4212 &[
4213 language::BufferEvent::Edited,
4214 language::BufferEvent::DirtyChanged
4215 ]
4216 );
4217 events.lock().clear();
4218 buffer.did_save(
4219 buffer.version(),
4220 buffer.file().unwrap().disk_state().mtime(),
4221 cx,
4222 );
4223 });
4224
4225 // after saving, the buffer is not dirty, and emits a saved event.
4226 buffer1.update(cx, |buffer, cx| {
4227 assert!(!buffer.is_dirty());
4228 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4229 events.lock().clear();
4230
4231 buffer.edit([(1..1, "B")], None, cx);
4232 buffer.edit([(2..2, "D")], None, cx);
4233 });
4234
4235 // after editing again, the buffer is dirty, and emits another dirty event.
4236 buffer1.update(cx, |buffer, cx| {
4237 assert!(buffer.text() == "aBDc");
4238 assert!(buffer.is_dirty());
4239 assert_eq!(
4240 *events.lock(),
4241 &[
4242 language::BufferEvent::Edited,
4243 language::BufferEvent::DirtyChanged,
4244 language::BufferEvent::Edited,
4245 ],
4246 );
4247 events.lock().clear();
4248
4249 // After restoring the buffer to its previously-saved state,
4250 // the buffer is not considered dirty anymore.
4251 buffer.edit([(1..3, "")], None, cx);
4252 assert!(buffer.text() == "ac");
4253 assert!(!buffer.is_dirty());
4254 });
4255
4256 assert_eq!(
4257 *events.lock(),
4258 &[
4259 language::BufferEvent::Edited,
4260 language::BufferEvent::DirtyChanged
4261 ]
4262 );
4263
4264 // When a file is deleted, it is not considered dirty.
4265 let events = Arc::new(Mutex::new(Vec::new()));
4266 let buffer2 = project
4267 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4268 .await
4269 .unwrap();
4270 buffer2.update(cx, |_, cx| {
4271 cx.subscribe(&buffer2, {
4272 let events = events.clone();
4273 move |_, _, event, _| match event {
4274 BufferEvent::Operation { .. } => {}
4275 _ => events.lock().push(event.clone()),
4276 }
4277 })
4278 .detach();
4279 });
4280
4281 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4282 .await
4283 .unwrap();
4284 cx.executor().run_until_parked();
4285 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4286 assert_eq!(
4287 mem::take(&mut *events.lock()),
4288 &[language::BufferEvent::FileHandleChanged]
4289 );
4290
4291 // Buffer becomes dirty when edited.
4292 buffer2.update(cx, |buffer, cx| {
4293 buffer.edit([(2..3, "")], None, cx);
4294 assert_eq!(buffer.is_dirty(), true);
4295 });
4296 assert_eq!(
4297 mem::take(&mut *events.lock()),
4298 &[
4299 language::BufferEvent::Edited,
4300 language::BufferEvent::DirtyChanged
4301 ]
4302 );
4303
4304 // Buffer becomes clean again when all of its content is removed, because
4305 // the file was deleted.
4306 buffer2.update(cx, |buffer, cx| {
4307 buffer.edit([(0..2, "")], None, cx);
4308 assert_eq!(buffer.is_empty(), true);
4309 assert_eq!(buffer.is_dirty(), false);
4310 });
4311 assert_eq!(
4312 *events.lock(),
4313 &[
4314 language::BufferEvent::Edited,
4315 language::BufferEvent::DirtyChanged
4316 ]
4317 );
4318
4319 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4320 let events = Arc::new(Mutex::new(Vec::new()));
4321 let buffer3 = project
4322 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4323 .await
4324 .unwrap();
4325 buffer3.update(cx, |_, cx| {
4326 cx.subscribe(&buffer3, {
4327 let events = events.clone();
4328 move |_, _, event, _| match event {
4329 BufferEvent::Operation { .. } => {}
4330 _ => events.lock().push(event.clone()),
4331 }
4332 })
4333 .detach();
4334 });
4335
4336 buffer3.update(cx, |buffer, cx| {
4337 buffer.edit([(0..0, "x")], None, cx);
4338 });
4339 events.lock().clear();
4340 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4341 .await
4342 .unwrap();
4343 cx.executor().run_until_parked();
4344 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4345 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4346}
4347
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // When a clean buffer's file changes on disk, the buffer reloads via a
    // diff so existing anchors land in the expected places. When a dirty
    // buffer's file changes, the buffer keeps its contents and is flagged as
    // conflicted instead.
    init_test(cx);

    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Place anchors at each marked offset so we can verify where they end up
    // after the on-disk change is diffed into the buffer.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4430
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    // Buffers store text with `\n` line endings internally while remembering
    // the file's original line-ending style; the style is tracked across
    // on-disk changes and restored when saving.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();

    // The Windows-style file is normalized to `\n` in the buffer, but its
    // original line ending is remembered.
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        path!("/dir/file1").as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
4492
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics which reference each other through
    // `related_information` are assembled into groups: each group has one
    // primary diagnostic plus its supporting hints, and can be retrieved as a
    // unit via `diagnostic_group`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Publish five diagnostics forming two logical groups:
    //   - "error 1" (warning) with one hint, all on the same range.
    //   - "error 2" (error) with two hints on a different range.
    // Primaries and hints point at each other via `related_information`;
    // hints pointing back use the "original diagnostic" message.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary of group "error 1".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint belonging to "error 1"; its related info points back at the primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary of group "error 2", listing both of its hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint of "error 2"; points back at the primary's range.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint of "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All five diagnostics are present, ordered by position. The "error 2"
    // cluster gets group_id 0 and "error 1" gets group_id 1; within each group
    // only the original diagnostic is marked `is_primary`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 is the "error 2" cluster: both hints plus the primary error.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 is the "error 1" cluster: the warning plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
4752
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a project entry sends the language server a
    // `workspace/willRenameFiles` request (whose returned workspace edit is
    // resolved) followed by a `workspace/didRenameFiles` notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server registers interest in renames of `.rs` files and of any
    // folder; renames outside these filters would not be reported to it.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename first; its future is awaited only after the
    // `willRenameFiles` handler below has been installed and has fired.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The workspace edit the fake server returns from `willRenameFiles`.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set exactly once by the handler below; proves the request was served.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request carries the old and new URIs of the entry.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives `didRenameFiles` with
    // the same URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4881
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Covers symbol rename via LSP: `prepare_rename` resolves the range of
    // the symbol under the cursor, then `perform_rename` applies a workspace
    // edit that spans multiple files.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    // Advertise `textDocument/prepareRename` support.
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Start the prepare-rename request first; the server handler installed
    // below serves it, and only then is the response future awaited.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            // Report that the symbol "ONE" (columns 6..9) is renameable.
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Same handler-after-request pattern for the actual rename.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            // Edit the definition in one.rs and both references in two.rs.
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The rename produces a transaction covering both edited buffers.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5021
5022#[gpui::test]
5023async fn test_search(cx: &mut gpui::TestAppContext) {
5024 init_test(cx);
5025
5026 let fs = FakeFs::new(cx.executor());
5027 fs.insert_tree(
5028 path!("/dir"),
5029 json!({
5030 "one.rs": "const ONE: usize = 1;",
5031 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5032 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5033 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5034 }),
5035 )
5036 .await;
5037 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5038 assert_eq!(
5039 search(
5040 &project,
5041 SearchQuery::text(
5042 "TWO",
5043 false,
5044 true,
5045 false,
5046 Default::default(),
5047 Default::default(),
5048 false,
5049 None
5050 )
5051 .unwrap(),
5052 cx
5053 )
5054 .await
5055 .unwrap(),
5056 HashMap::from_iter([
5057 (path!("dir/two.rs").to_string(), vec![6..9]),
5058 (path!("dir/three.rs").to_string(), vec![37..40])
5059 ])
5060 );
5061
5062 let buffer_4 = project
5063 .update(cx, |project, cx| {
5064 project.open_local_buffer(path!("/dir/four.rs"), cx)
5065 })
5066 .await
5067 .unwrap();
5068 buffer_4.update(cx, |buffer, cx| {
5069 let text = "two::TWO";
5070 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5071 });
5072
5073 assert_eq!(
5074 search(
5075 &project,
5076 SearchQuery::text(
5077 "TWO",
5078 false,
5079 true,
5080 false,
5081 Default::default(),
5082 Default::default(),
5083 false,
5084 None,
5085 )
5086 .unwrap(),
5087 cx
5088 )
5089 .await
5090 .unwrap(),
5091 HashMap::from_iter([
5092 (path!("dir/two.rs").to_string(), vec![6..9]),
5093 (path!("dir/three.rs").to_string(), vec![37..40]),
5094 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5095 ])
5096 );
5097}
5098
5099#[gpui::test]
5100async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
5101 init_test(cx);
5102
5103 let search_query = "file";
5104
5105 let fs = FakeFs::new(cx.executor());
5106 fs.insert_tree(
5107 path!("/dir"),
5108 json!({
5109 "one.rs": r#"// Rust file one"#,
5110 "one.ts": r#"// TypeScript file one"#,
5111 "two.rs": r#"// Rust file two"#,
5112 "two.ts": r#"// TypeScript file two"#,
5113 }),
5114 )
5115 .await;
5116 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5117
5118 assert!(
5119 search(
5120 &project,
5121 SearchQuery::text(
5122 search_query,
5123 false,
5124 true,
5125 false,
5126 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5127 Default::default(),
5128 false,
5129 None
5130 )
5131 .unwrap(),
5132 cx
5133 )
5134 .await
5135 .unwrap()
5136 .is_empty(),
5137 "If no inclusions match, no files should be returned"
5138 );
5139
5140 assert_eq!(
5141 search(
5142 &project,
5143 SearchQuery::text(
5144 search_query,
5145 false,
5146 true,
5147 false,
5148 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5149 Default::default(),
5150 false,
5151 None
5152 )
5153 .unwrap(),
5154 cx
5155 )
5156 .await
5157 .unwrap(),
5158 HashMap::from_iter([
5159 (path!("dir/one.rs").to_string(), vec![8..12]),
5160 (path!("dir/two.rs").to_string(), vec![8..12]),
5161 ]),
5162 "Rust only search should give only Rust files"
5163 );
5164
5165 assert_eq!(
5166 search(
5167 &project,
5168 SearchQuery::text(
5169 search_query,
5170 false,
5171 true,
5172 false,
5173 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5174 Default::default(),
5175 false,
5176 None,
5177 )
5178 .unwrap(),
5179 cx
5180 )
5181 .await
5182 .unwrap(),
5183 HashMap::from_iter([
5184 (path!("dir/one.ts").to_string(), vec![14..18]),
5185 (path!("dir/two.ts").to_string(), vec![14..18]),
5186 ]),
5187 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
5188 );
5189
5190 assert_eq!(
5191 search(
5192 &project,
5193 SearchQuery::text(
5194 search_query,
5195 false,
5196 true,
5197 false,
5198 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5199 .unwrap(),
5200 Default::default(),
5201 false,
5202 None,
5203 )
5204 .unwrap(),
5205 cx
5206 )
5207 .await
5208 .unwrap(),
5209 HashMap::from_iter([
5210 (path!("dir/two.ts").to_string(), vec![14..18]),
5211 (path!("dir/one.rs").to_string(), vec![8..12]),
5212 (path!("dir/one.ts").to_string(), vec![14..18]),
5213 (path!("dir/two.rs").to_string(), vec![8..12]),
5214 ]),
5215 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
5216 );
5217}
5218
5219#[gpui::test]
5220async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5221 init_test(cx);
5222
5223 let search_query = "file";
5224
5225 let fs = FakeFs::new(cx.executor());
5226 fs.insert_tree(
5227 path!("/dir"),
5228 json!({
5229 "one.rs": r#"// Rust file one"#,
5230 "one.ts": r#"// TypeScript file one"#,
5231 "two.rs": r#"// Rust file two"#,
5232 "two.ts": r#"// TypeScript file two"#,
5233 }),
5234 )
5235 .await;
5236 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5237
5238 assert_eq!(
5239 search(
5240 &project,
5241 SearchQuery::text(
5242 search_query,
5243 false,
5244 true,
5245 false,
5246 Default::default(),
5247 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5248 false,
5249 None,
5250 )
5251 .unwrap(),
5252 cx
5253 )
5254 .await
5255 .unwrap(),
5256 HashMap::from_iter([
5257 (path!("dir/one.rs").to_string(), vec![8..12]),
5258 (path!("dir/one.ts").to_string(), vec![14..18]),
5259 (path!("dir/two.rs").to_string(), vec![8..12]),
5260 (path!("dir/two.ts").to_string(), vec![14..18]),
5261 ]),
5262 "If no exclusions match, all files should be returned"
5263 );
5264
5265 assert_eq!(
5266 search(
5267 &project,
5268 SearchQuery::text(
5269 search_query,
5270 false,
5271 true,
5272 false,
5273 Default::default(),
5274 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5275 false,
5276 None,
5277 )
5278 .unwrap(),
5279 cx
5280 )
5281 .await
5282 .unwrap(),
5283 HashMap::from_iter([
5284 (path!("dir/one.ts").to_string(), vec![14..18]),
5285 (path!("dir/two.ts").to_string(), vec![14..18]),
5286 ]),
5287 "Rust exclusion search should give only TypeScript files"
5288 );
5289
5290 assert_eq!(
5291 search(
5292 &project,
5293 SearchQuery::text(
5294 search_query,
5295 false,
5296 true,
5297 false,
5298 Default::default(),
5299 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5300 false,
5301 None,
5302 )
5303 .unwrap(),
5304 cx
5305 )
5306 .await
5307 .unwrap(),
5308 HashMap::from_iter([
5309 (path!("dir/one.rs").to_string(), vec![8..12]),
5310 (path!("dir/two.rs").to_string(), vec![8..12]),
5311 ]),
5312 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5313 );
5314
5315 assert!(
5316 search(
5317 &project,
5318 SearchQuery::text(
5319 search_query,
5320 false,
5321 true,
5322 false,
5323 Default::default(),
5324 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5325 .unwrap(),
5326 false,
5327 None,
5328 )
5329 .unwrap(),
5330 cx
5331 )
5332 .await
5333 .unwrap()
5334 .is_empty(),
5335 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5336 );
5337}
5338
5339#[gpui::test]
5340async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5341 init_test(cx);
5342
5343 let search_query = "file";
5344
5345 let fs = FakeFs::new(cx.executor());
5346 fs.insert_tree(
5347 path!("/dir"),
5348 json!({
5349 "one.rs": r#"// Rust file one"#,
5350 "one.ts": r#"// TypeScript file one"#,
5351 "two.rs": r#"// Rust file two"#,
5352 "two.ts": r#"// TypeScript file two"#,
5353 }),
5354 )
5355 .await;
5356 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5357
5358 assert!(
5359 search(
5360 &project,
5361 SearchQuery::text(
5362 search_query,
5363 false,
5364 true,
5365 false,
5366 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5367 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5368 false,
5369 None,
5370 )
5371 .unwrap(),
5372 cx
5373 )
5374 .await
5375 .unwrap()
5376 .is_empty(),
5377 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5378 );
5379
5380 assert!(
5381 search(
5382 &project,
5383 SearchQuery::text(
5384 search_query,
5385 false,
5386 true,
5387 false,
5388 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5389 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5390 false,
5391 None,
5392 )
5393 .unwrap(),
5394 cx
5395 )
5396 .await
5397 .unwrap()
5398 .is_empty(),
5399 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5400 );
5401
5402 assert!(
5403 search(
5404 &project,
5405 SearchQuery::text(
5406 search_query,
5407 false,
5408 true,
5409 false,
5410 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5411 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5412 false,
5413 None,
5414 )
5415 .unwrap(),
5416 cx
5417 )
5418 .await
5419 .unwrap()
5420 .is_empty(),
5421 "Non-matching inclusions and exclusions should not change that."
5422 );
5423
5424 assert_eq!(
5425 search(
5426 &project,
5427 SearchQuery::text(
5428 search_query,
5429 false,
5430 true,
5431 false,
5432 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5433 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5434 false,
5435 None,
5436 )
5437 .unwrap(),
5438 cx
5439 )
5440 .await
5441 .unwrap(),
5442 HashMap::from_iter([
5443 (path!("dir/one.ts").to_string(), vec![14..18]),
5444 (path!("dir/two.ts").to_string(), vec![14..18]),
5445 ]),
5446 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5447 );
5448}
5449
5450#[gpui::test]
5451async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
5452 init_test(cx);
5453
5454 let fs = FakeFs::new(cx.executor());
5455 fs.insert_tree(
5456 path!("/worktree-a"),
5457 json!({
5458 "haystack.rs": r#"// NEEDLE"#,
5459 "haystack.ts": r#"// NEEDLE"#,
5460 }),
5461 )
5462 .await;
5463 fs.insert_tree(
5464 path!("/worktree-b"),
5465 json!({
5466 "haystack.rs": r#"// NEEDLE"#,
5467 "haystack.ts": r#"// NEEDLE"#,
5468 }),
5469 )
5470 .await;
5471
5472 let project = Project::test(
5473 fs.clone(),
5474 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
5475 cx,
5476 )
5477 .await;
5478
5479 assert_eq!(
5480 search(
5481 &project,
5482 SearchQuery::text(
5483 "NEEDLE",
5484 false,
5485 true,
5486 false,
5487 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
5488 Default::default(),
5489 true,
5490 None,
5491 )
5492 .unwrap(),
5493 cx
5494 )
5495 .await
5496 .unwrap(),
5497 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
5498 "should only return results from included worktree"
5499 );
5500 assert_eq!(
5501 search(
5502 &project,
5503 SearchQuery::text(
5504 "NEEDLE",
5505 false,
5506 true,
5507 false,
5508 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
5509 Default::default(),
5510 true,
5511 None,
5512 )
5513 .unwrap(),
5514 cx
5515 )
5516 .await
5517 .unwrap(),
5518 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
5519 "should only return results from included worktree"
5520 );
5521
5522 assert_eq!(
5523 search(
5524 &project,
5525 SearchQuery::text(
5526 "NEEDLE",
5527 false,
5528 true,
5529 false,
5530 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5531 Default::default(),
5532 false,
5533 None,
5534 )
5535 .unwrap(),
5536 cx
5537 )
5538 .await
5539 .unwrap(),
5540 HashMap::from_iter([
5541 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
5542 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
5543 ]),
5544 "should return results from both worktrees"
5545 );
5546}
5547
5548#[gpui::test]
5549async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
5550 init_test(cx);
5551
5552 let fs = FakeFs::new(cx.background_executor.clone());
5553 fs.insert_tree(
5554 path!("/dir"),
5555 json!({
5556 ".git": {},
5557 ".gitignore": "**/target\n/node_modules\n",
5558 "target": {
5559 "index.txt": "index_key:index_value"
5560 },
5561 "node_modules": {
5562 "eslint": {
5563 "index.ts": "const eslint_key = 'eslint value'",
5564 "package.json": r#"{ "some_key": "some value" }"#,
5565 },
5566 "prettier": {
5567 "index.ts": "const prettier_key = 'prettier value'",
5568 "package.json": r#"{ "other_key": "other value" }"#,
5569 },
5570 },
5571 "package.json": r#"{ "main_key": "main value" }"#,
5572 }),
5573 )
5574 .await;
5575 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5576
5577 let query = "key";
5578 assert_eq!(
5579 search(
5580 &project,
5581 SearchQuery::text(
5582 query,
5583 false,
5584 false,
5585 false,
5586 Default::default(),
5587 Default::default(),
5588 false,
5589 None,
5590 )
5591 .unwrap(),
5592 cx
5593 )
5594 .await
5595 .unwrap(),
5596 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
5597 "Only one non-ignored file should have the query"
5598 );
5599
5600 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5601 assert_eq!(
5602 search(
5603 &project,
5604 SearchQuery::text(
5605 query,
5606 false,
5607 false,
5608 true,
5609 Default::default(),
5610 Default::default(),
5611 false,
5612 None,
5613 )
5614 .unwrap(),
5615 cx
5616 )
5617 .await
5618 .unwrap(),
5619 HashMap::from_iter([
5620 (path!("dir/package.json").to_string(), vec![8..11]),
5621 (path!("dir/target/index.txt").to_string(), vec![6..9]),
5622 (
5623 path!("dir/node_modules/prettier/package.json").to_string(),
5624 vec![9..12]
5625 ),
5626 (
5627 path!("dir/node_modules/prettier/index.ts").to_string(),
5628 vec![15..18]
5629 ),
5630 (
5631 path!("dir/node_modules/eslint/index.ts").to_string(),
5632 vec![13..16]
5633 ),
5634 (
5635 path!("dir/node_modules/eslint/package.json").to_string(),
5636 vec![8..11]
5637 ),
5638 ]),
5639 "Unrestricted search with ignored directories should find every file with the query"
5640 );
5641
5642 let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
5643 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
5644 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5645 assert_eq!(
5646 search(
5647 &project,
5648 SearchQuery::text(
5649 query,
5650 false,
5651 false,
5652 true,
5653 files_to_include,
5654 files_to_exclude,
5655 false,
5656 None,
5657 )
5658 .unwrap(),
5659 cx
5660 )
5661 .await
5662 .unwrap(),
5663 HashMap::from_iter([(
5664 path!("dir/node_modules/prettier/package.json").to_string(),
5665 vec![9..12]
5666 )]),
5667 "With search including ignored prettier directory and excluding TS files, only one file should be found"
5668 );
5669}
5670
5671#[gpui::test]
5672async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
5673 init_test(cx);
5674
5675 let fs = FakeFs::new(cx.executor());
5676 fs.insert_tree(
5677 path!("/dir"),
5678 json!({
5679 "one.rs": "// ПРИВЕТ? привет!",
5680 "two.rs": "// ПРИВЕТ.",
5681 "three.rs": "// привет",
5682 }),
5683 )
5684 .await;
5685 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5686
5687 let unicode_case_sensitive_query = SearchQuery::text(
5688 "привет",
5689 false,
5690 true,
5691 false,
5692 Default::default(),
5693 Default::default(),
5694 false,
5695 None,
5696 );
5697 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
5698 assert_eq!(
5699 search(&project, unicode_case_sensitive_query.unwrap(), cx)
5700 .await
5701 .unwrap(),
5702 HashMap::from_iter([
5703 (path!("dir/one.rs").to_string(), vec![17..29]),
5704 (path!("dir/three.rs").to_string(), vec![3..15]),
5705 ])
5706 );
5707
5708 let unicode_case_insensitive_query = SearchQuery::text(
5709 "привет",
5710 false,
5711 false,
5712 false,
5713 Default::default(),
5714 Default::default(),
5715 false,
5716 None,
5717 );
5718 assert_matches!(
5719 unicode_case_insensitive_query,
5720 Ok(SearchQuery::Regex { .. })
5721 );
5722 assert_eq!(
5723 search(&project, unicode_case_insensitive_query.unwrap(), cx)
5724 .await
5725 .unwrap(),
5726 HashMap::from_iter([
5727 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
5728 (path!("dir/two.rs").to_string(), vec![3..15]),
5729 (path!("dir/three.rs").to_string(), vec![3..15]),
5730 ])
5731 );
5732
5733 assert_eq!(
5734 search(
5735 &project,
5736 SearchQuery::text(
5737 "привет.",
5738 false,
5739 false,
5740 false,
5741 Default::default(),
5742 Default::default(),
5743 false,
5744 None,
5745 )
5746 .unwrap(),
5747 cx
5748 )
5749 .await
5750 .unwrap(),
5751 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
5752 );
5753}
5754
5755#[gpui::test]
5756async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5757 init_test(cx);
5758
5759 let fs = FakeFs::new(cx.executor().clone());
5760 fs.insert_tree(
5761 "/one/two",
5762 json!({
5763 "three": {
5764 "a.txt": "",
5765 "four": {}
5766 },
5767 "c.rs": ""
5768 }),
5769 )
5770 .await;
5771
5772 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5773 project
5774 .update(cx, |project, cx| {
5775 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5776 project.create_entry((id, "b.."), true, cx)
5777 })
5778 .await
5779 .unwrap()
5780 .to_included()
5781 .unwrap();
5782
5783 // Can't create paths outside the project
5784 let result = project
5785 .update(cx, |project, cx| {
5786 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5787 project.create_entry((id, "../../boop"), true, cx)
5788 })
5789 .await;
5790 assert!(result.is_err());
5791
5792 // Can't create paths with '..'
5793 let result = project
5794 .update(cx, |project, cx| {
5795 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5796 project.create_entry((id, "four/../beep"), true, cx)
5797 })
5798 .await;
5799 assert!(result.is_err());
5800
5801 assert_eq!(
5802 fs.paths(true),
5803 vec![
5804 PathBuf::from(path!("/")),
5805 PathBuf::from(path!("/one")),
5806 PathBuf::from(path!("/one/two")),
5807 PathBuf::from(path!("/one/two/c.rs")),
5808 PathBuf::from(path!("/one/two/three")),
5809 PathBuf::from(path!("/one/two/three/a.txt")),
5810 PathBuf::from(path!("/one/two/three/b..")),
5811 PathBuf::from(path!("/one/two/three/four")),
5812 ]
5813 );
5814
5815 // And we cannot open buffers with '..'
5816 let result = project
5817 .update(cx, |project, cx| {
5818 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5819 project.open_buffer((id, "../c.rs"), cx)
5820 })
5821 .await;
5822 assert!(result.is_err())
5823}
5824
5825#[gpui::test]
5826async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
5827 init_test(cx);
5828
5829 let fs = FakeFs::new(cx.executor());
5830 fs.insert_tree(
5831 path!("/dir"),
5832 json!({
5833 "a.tsx": "a",
5834 }),
5835 )
5836 .await;
5837
5838 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5839
5840 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5841 language_registry.add(tsx_lang());
5842 let language_server_names = [
5843 "TypeScriptServer",
5844 "TailwindServer",
5845 "ESLintServer",
5846 "NoHoverCapabilitiesServer",
5847 ];
5848 let mut language_servers = [
5849 language_registry.register_fake_lsp(
5850 "tsx",
5851 FakeLspAdapter {
5852 name: language_server_names[0],
5853 capabilities: lsp::ServerCapabilities {
5854 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5855 ..lsp::ServerCapabilities::default()
5856 },
5857 ..FakeLspAdapter::default()
5858 },
5859 ),
5860 language_registry.register_fake_lsp(
5861 "tsx",
5862 FakeLspAdapter {
5863 name: language_server_names[1],
5864 capabilities: lsp::ServerCapabilities {
5865 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5866 ..lsp::ServerCapabilities::default()
5867 },
5868 ..FakeLspAdapter::default()
5869 },
5870 ),
5871 language_registry.register_fake_lsp(
5872 "tsx",
5873 FakeLspAdapter {
5874 name: language_server_names[2],
5875 capabilities: lsp::ServerCapabilities {
5876 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5877 ..lsp::ServerCapabilities::default()
5878 },
5879 ..FakeLspAdapter::default()
5880 },
5881 ),
5882 language_registry.register_fake_lsp(
5883 "tsx",
5884 FakeLspAdapter {
5885 name: language_server_names[3],
5886 capabilities: lsp::ServerCapabilities {
5887 hover_provider: None,
5888 ..lsp::ServerCapabilities::default()
5889 },
5890 ..FakeLspAdapter::default()
5891 },
5892 ),
5893 ];
5894
5895 let (buffer, _handle) = project
5896 .update(cx, |p, cx| {
5897 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5898 })
5899 .await
5900 .unwrap();
5901 cx.executor().run_until_parked();
5902
5903 let mut servers_with_hover_requests = HashMap::default();
5904 for i in 0..language_server_names.len() {
5905 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
5906 panic!(
5907 "Failed to get language server #{i} with name {}",
5908 &language_server_names[i]
5909 )
5910 });
5911 let new_server_name = new_server.server.name();
5912 assert!(
5913 !servers_with_hover_requests.contains_key(&new_server_name),
5914 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5915 );
5916 match new_server_name.as_ref() {
5917 "TailwindServer" | "TypeScriptServer" => {
5918 servers_with_hover_requests.insert(
5919 new_server_name.clone(),
5920 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5921 move |_, _| {
5922 let name = new_server_name.clone();
5923 async move {
5924 Ok(Some(lsp::Hover {
5925 contents: lsp::HoverContents::Scalar(
5926 lsp::MarkedString::String(format!("{name} hover")),
5927 ),
5928 range: None,
5929 }))
5930 }
5931 },
5932 ),
5933 );
5934 }
5935 "ESLintServer" => {
5936 servers_with_hover_requests.insert(
5937 new_server_name,
5938 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5939 |_, _| async move { Ok(None) },
5940 ),
5941 );
5942 }
5943 "NoHoverCapabilitiesServer" => {
5944 let _never_handled = new_server
5945 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
5946 panic!(
5947 "Should not call for hovers server with no corresponding capabilities"
5948 )
5949 });
5950 }
5951 unexpected => panic!("Unexpected server name: {unexpected}"),
5952 }
5953 }
5954
5955 let hover_task = project.update(cx, |project, cx| {
5956 project.hover(&buffer, Point::new(0, 0), cx)
5957 });
5958 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
5959 |mut hover_request| async move {
5960 hover_request
5961 .next()
5962 .await
5963 .expect("All hover requests should have been triggered")
5964 },
5965 ))
5966 .await;
5967 assert_eq!(
5968 vec!["TailwindServer hover", "TypeScriptServer hover"],
5969 hover_task
5970 .await
5971 .into_iter()
5972 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5973 .sorted()
5974 .collect::<Vec<_>>(),
5975 "Should receive hover responses from all related servers with hover capabilities"
5976 );
5977}
5978
5979#[gpui::test]
5980async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5981 init_test(cx);
5982
5983 let fs = FakeFs::new(cx.executor());
5984 fs.insert_tree(
5985 path!("/dir"),
5986 json!({
5987 "a.ts": "a",
5988 }),
5989 )
5990 .await;
5991
5992 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5993
5994 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5995 language_registry.add(typescript_lang());
5996 let mut fake_language_servers = language_registry.register_fake_lsp(
5997 "TypeScript",
5998 FakeLspAdapter {
5999 capabilities: lsp::ServerCapabilities {
6000 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6001 ..lsp::ServerCapabilities::default()
6002 },
6003 ..FakeLspAdapter::default()
6004 },
6005 );
6006
6007 let (buffer, _handle) = project
6008 .update(cx, |p, cx| {
6009 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6010 })
6011 .await
6012 .unwrap();
6013 cx.executor().run_until_parked();
6014
6015 let fake_server = fake_language_servers
6016 .next()
6017 .await
6018 .expect("failed to get the language server");
6019
6020 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6021 move |_, _| async move {
6022 Ok(Some(lsp::Hover {
6023 contents: lsp::HoverContents::Array(vec![
6024 lsp::MarkedString::String("".to_string()),
6025 lsp::MarkedString::String(" ".to_string()),
6026 lsp::MarkedString::String("\n\n\n".to_string()),
6027 ]),
6028 range: None,
6029 }))
6030 },
6031 );
6032
6033 let hover_task = project.update(cx, |project, cx| {
6034 project.hover(&buffer, Point::new(0, 0), cx)
6035 });
6036 let () = request_handled
6037 .next()
6038 .await
6039 .expect("All hover requests should have been triggered");
6040 assert_eq!(
6041 Vec::<String>::new(),
6042 hover_task
6043 .await
6044 .into_iter()
6045 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6046 .sorted()
6047 .collect::<Vec<_>>(),
6048 "Empty hover parts should be ignored"
6049 );
6050}
6051
6052#[gpui::test]
6053async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
6054 init_test(cx);
6055
6056 let fs = FakeFs::new(cx.executor());
6057 fs.insert_tree(
6058 path!("/dir"),
6059 json!({
6060 "a.ts": "a",
6061 }),
6062 )
6063 .await;
6064
6065 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6066
6067 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6068 language_registry.add(typescript_lang());
6069 let mut fake_language_servers = language_registry.register_fake_lsp(
6070 "TypeScript",
6071 FakeLspAdapter {
6072 capabilities: lsp::ServerCapabilities {
6073 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6074 ..lsp::ServerCapabilities::default()
6075 },
6076 ..FakeLspAdapter::default()
6077 },
6078 );
6079
6080 let (buffer, _handle) = project
6081 .update(cx, |p, cx| {
6082 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6083 })
6084 .await
6085 .unwrap();
6086 cx.executor().run_until_parked();
6087
6088 let fake_server = fake_language_servers
6089 .next()
6090 .await
6091 .expect("failed to get the language server");
6092
6093 let mut request_handled = fake_server
6094 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
6095 Ok(Some(vec![
6096 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6097 title: "organize imports".to_string(),
6098 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
6099 ..lsp::CodeAction::default()
6100 }),
6101 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6102 title: "fix code".to_string(),
6103 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
6104 ..lsp::CodeAction::default()
6105 }),
6106 ]))
6107 });
6108
6109 let code_actions_task = project.update(cx, |project, cx| {
6110 project.code_actions(
6111 &buffer,
6112 0..buffer.read(cx).len(),
6113 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
6114 cx,
6115 )
6116 });
6117
6118 let () = request_handled
6119 .next()
6120 .await
6121 .expect("The code action request should have been triggered");
6122
6123 let code_actions = code_actions_task.await.unwrap();
6124 assert_eq!(code_actions.len(), 1);
6125 assert_eq!(
6126 code_actions[0].lsp_action.action_kind(),
6127 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
6128 );
6129}
6130
6131#[gpui::test]
6132async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6133 init_test(cx);
6134
6135 let fs = FakeFs::new(cx.executor());
6136 fs.insert_tree(
6137 path!("/dir"),
6138 json!({
6139 "a.tsx": "a",
6140 }),
6141 )
6142 .await;
6143
6144 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6145
6146 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6147 language_registry.add(tsx_lang());
6148 let language_server_names = [
6149 "TypeScriptServer",
6150 "TailwindServer",
6151 "ESLintServer",
6152 "NoActionsCapabilitiesServer",
6153 ];
6154
6155 let mut language_server_rxs = [
6156 language_registry.register_fake_lsp(
6157 "tsx",
6158 FakeLspAdapter {
6159 name: language_server_names[0],
6160 capabilities: lsp::ServerCapabilities {
6161 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6162 ..lsp::ServerCapabilities::default()
6163 },
6164 ..FakeLspAdapter::default()
6165 },
6166 ),
6167 language_registry.register_fake_lsp(
6168 "tsx",
6169 FakeLspAdapter {
6170 name: language_server_names[1],
6171 capabilities: lsp::ServerCapabilities {
6172 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6173 ..lsp::ServerCapabilities::default()
6174 },
6175 ..FakeLspAdapter::default()
6176 },
6177 ),
6178 language_registry.register_fake_lsp(
6179 "tsx",
6180 FakeLspAdapter {
6181 name: language_server_names[2],
6182 capabilities: lsp::ServerCapabilities {
6183 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6184 ..lsp::ServerCapabilities::default()
6185 },
6186 ..FakeLspAdapter::default()
6187 },
6188 ),
6189 language_registry.register_fake_lsp(
6190 "tsx",
6191 FakeLspAdapter {
6192 name: language_server_names[3],
6193 capabilities: lsp::ServerCapabilities {
6194 code_action_provider: None,
6195 ..lsp::ServerCapabilities::default()
6196 },
6197 ..FakeLspAdapter::default()
6198 },
6199 ),
6200 ];
6201
6202 let (buffer, _handle) = project
6203 .update(cx, |p, cx| {
6204 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6205 })
6206 .await
6207 .unwrap();
6208 cx.executor().run_until_parked();
6209
6210 let mut servers_with_actions_requests = HashMap::default();
6211 for i in 0..language_server_names.len() {
6212 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6213 panic!(
6214 "Failed to get language server #{i} with name {}",
6215 &language_server_names[i]
6216 )
6217 });
6218 let new_server_name = new_server.server.name();
6219
6220 assert!(
6221 !servers_with_actions_requests.contains_key(&new_server_name),
6222 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6223 );
6224 match new_server_name.0.as_ref() {
6225 "TailwindServer" | "TypeScriptServer" => {
6226 servers_with_actions_requests.insert(
6227 new_server_name.clone(),
6228 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6229 move |_, _| {
6230 let name = new_server_name.clone();
6231 async move {
6232 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6233 lsp::CodeAction {
6234 title: format!("{name} code action"),
6235 ..lsp::CodeAction::default()
6236 },
6237 )]))
6238 }
6239 },
6240 ),
6241 );
6242 }
6243 "ESLintServer" => {
6244 servers_with_actions_requests.insert(
6245 new_server_name,
6246 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6247 |_, _| async move { Ok(None) },
6248 ),
6249 );
6250 }
6251 "NoActionsCapabilitiesServer" => {
6252 let _never_handled = new_server
6253 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6254 panic!(
6255 "Should not call for code actions server with no corresponding capabilities"
6256 )
6257 });
6258 }
6259 unexpected => panic!("Unexpected server name: {unexpected}"),
6260 }
6261 }
6262
6263 let code_actions_task = project.update(cx, |project, cx| {
6264 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6265 });
6266
6267 // cx.run_until_parked();
6268 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6269 |mut code_actions_request| async move {
6270 code_actions_request
6271 .next()
6272 .await
6273 .expect("All code actions requests should have been triggered")
6274 },
6275 ))
6276 .await;
6277 assert_eq!(
6278 vec!["TailwindServer code action", "TypeScriptServer code action"],
6279 code_actions_task
6280 .await
6281 .unwrap()
6282 .into_iter()
6283 .map(|code_action| code_action.lsp_action.title().to_owned())
6284 .sorted()
6285 .collect::<Vec<_>>(),
6286 "Should receive code actions responses from all related servers with hover capabilities"
6287 );
6288}
6289
6290#[gpui::test]
6291async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6292 init_test(cx);
6293
6294 let fs = FakeFs::new(cx.executor());
6295 fs.insert_tree(
6296 "/dir",
6297 json!({
6298 "a.rs": "let a = 1;",
6299 "b.rs": "let b = 2;",
6300 "c.rs": "let c = 2;",
6301 }),
6302 )
6303 .await;
6304
6305 let project = Project::test(
6306 fs,
6307 [
6308 "/dir/a.rs".as_ref(),
6309 "/dir/b.rs".as_ref(),
6310 "/dir/c.rs".as_ref(),
6311 ],
6312 cx,
6313 )
6314 .await;
6315
6316 // check the initial state and get the worktrees
6317 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6318 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6319 assert_eq!(worktrees.len(), 3);
6320
6321 let worktree_a = worktrees[0].read(cx);
6322 let worktree_b = worktrees[1].read(cx);
6323 let worktree_c = worktrees[2].read(cx);
6324
6325 // check they start in the right order
6326 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6327 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6328 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6329
6330 (
6331 worktrees[0].clone(),
6332 worktrees[1].clone(),
6333 worktrees[2].clone(),
6334 )
6335 });
6336
6337 // move first worktree to after the second
6338 // [a, b, c] -> [b, a, c]
6339 project
6340 .update(cx, |project, cx| {
6341 let first = worktree_a.read(cx);
6342 let second = worktree_b.read(cx);
6343 project.move_worktree(first.id(), second.id(), cx)
6344 })
6345 .expect("moving first after second");
6346
6347 // check the state after moving
6348 project.update(cx, |project, cx| {
6349 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6350 assert_eq!(worktrees.len(), 3);
6351
6352 let first = worktrees[0].read(cx);
6353 let second = worktrees[1].read(cx);
6354 let third = worktrees[2].read(cx);
6355
6356 // check they are now in the right order
6357 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6358 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6359 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6360 });
6361
6362 // move the second worktree to before the first
6363 // [b, a, c] -> [a, b, c]
6364 project
6365 .update(cx, |project, cx| {
6366 let second = worktree_a.read(cx);
6367 let first = worktree_b.read(cx);
6368 project.move_worktree(first.id(), second.id(), cx)
6369 })
6370 .expect("moving second before first");
6371
6372 // check the state after moving
6373 project.update(cx, |project, cx| {
6374 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6375 assert_eq!(worktrees.len(), 3);
6376
6377 let first = worktrees[0].read(cx);
6378 let second = worktrees[1].read(cx);
6379 let third = worktrees[2].read(cx);
6380
6381 // check they are now in the right order
6382 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6383 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6384 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6385 });
6386
6387 // move the second worktree to after the third
6388 // [a, b, c] -> [a, c, b]
6389 project
6390 .update(cx, |project, cx| {
6391 let second = worktree_b.read(cx);
6392 let third = worktree_c.read(cx);
6393 project.move_worktree(second.id(), third.id(), cx)
6394 })
6395 .expect("moving second after third");
6396
6397 // check the state after moving
6398 project.update(cx, |project, cx| {
6399 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6400 assert_eq!(worktrees.len(), 3);
6401
6402 let first = worktrees[0].read(cx);
6403 let second = worktrees[1].read(cx);
6404 let third = worktrees[2].read(cx);
6405
6406 // check they are now in the right order
6407 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6408 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6409 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6410 });
6411
6412 // move the third worktree to before the second
6413 // [a, c, b] -> [a, b, c]
6414 project
6415 .update(cx, |project, cx| {
6416 let third = worktree_c.read(cx);
6417 let second = worktree_b.read(cx);
6418 project.move_worktree(third.id(), second.id(), cx)
6419 })
6420 .expect("moving third before second");
6421
6422 // check the state after moving
6423 project.update(cx, |project, cx| {
6424 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6425 assert_eq!(worktrees.len(), 3);
6426
6427 let first = worktrees[0].read(cx);
6428 let second = worktrees[1].read(cx);
6429 let third = worktrees[2].read(cx);
6430
6431 // check they are now in the right order
6432 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6433 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6434 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6435 });
6436
6437 // move the first worktree to after the third
6438 // [a, b, c] -> [b, c, a]
6439 project
6440 .update(cx, |project, cx| {
6441 let first = worktree_a.read(cx);
6442 let third = worktree_c.read(cx);
6443 project.move_worktree(first.id(), third.id(), cx)
6444 })
6445 .expect("moving first after third");
6446
6447 // check the state after moving
6448 project.update(cx, |project, cx| {
6449 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6450 assert_eq!(worktrees.len(), 3);
6451
6452 let first = worktrees[0].read(cx);
6453 let second = worktrees[1].read(cx);
6454 let third = worktrees[2].read(cx);
6455
6456 // check they are now in the right order
6457 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6458 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6459 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6460 });
6461
6462 // move the third worktree to before the first
6463 // [b, c, a] -> [a, b, c]
6464 project
6465 .update(cx, |project, cx| {
6466 let third = worktree_a.read(cx);
6467 let first = worktree_b.read(cx);
6468 project.move_worktree(third.id(), first.id(), cx)
6469 })
6470 .expect("moving third before first");
6471
6472 // check the state after moving
6473 project.update(cx, |project, cx| {
6474 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6475 assert_eq!(worktrees.len(), 3);
6476
6477 let first = worktrees[0].read(cx);
6478 let second = worktrees[1].read(cx);
6479 let third = worktrees[2].read(cx);
6480
6481 // check they are now in the right order
6482 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6483 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6484 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6485 });
6486}
6487
6488#[gpui::test]
6489async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
6490 init_test(cx);
6491
6492 let staged_contents = r#"
6493 fn main() {
6494 println!("hello world");
6495 }
6496 "#
6497 .unindent();
6498 let file_contents = r#"
6499 // print goodbye
6500 fn main() {
6501 println!("goodbye world");
6502 }
6503 "#
6504 .unindent();
6505
6506 let fs = FakeFs::new(cx.background_executor.clone());
6507 fs.insert_tree(
6508 "/dir",
6509 json!({
6510 ".git": {},
6511 "src": {
6512 "main.rs": file_contents,
6513 }
6514 }),
6515 )
6516 .await;
6517
6518 fs.set_index_for_repo(
6519 Path::new("/dir/.git"),
6520 &[("src/main.rs".into(), staged_contents)],
6521 );
6522
6523 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6524
6525 let buffer = project
6526 .update(cx, |project, cx| {
6527 project.open_local_buffer("/dir/src/main.rs", cx)
6528 })
6529 .await
6530 .unwrap();
6531 let unstaged_diff = project
6532 .update(cx, |project, cx| {
6533 project.open_unstaged_diff(buffer.clone(), cx)
6534 })
6535 .await
6536 .unwrap();
6537
6538 cx.run_until_parked();
6539 unstaged_diff.update(cx, |unstaged_diff, cx| {
6540 let snapshot = buffer.read(cx).snapshot();
6541 assert_hunks(
6542 unstaged_diff.hunks(&snapshot, cx),
6543 &snapshot,
6544 &unstaged_diff.base_text_string().unwrap(),
6545 &[
6546 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
6547 (
6548 2..3,
6549 " println!(\"hello world\");\n",
6550 " println!(\"goodbye world\");\n",
6551 DiffHunkStatus::modified_none(),
6552 ),
6553 ],
6554 );
6555 });
6556
6557 let staged_contents = r#"
6558 // print goodbye
6559 fn main() {
6560 }
6561 "#
6562 .unindent();
6563
6564 fs.set_index_for_repo(
6565 Path::new("/dir/.git"),
6566 &[("src/main.rs".into(), staged_contents)],
6567 );
6568
6569 cx.run_until_parked();
6570 unstaged_diff.update(cx, |unstaged_diff, cx| {
6571 let snapshot = buffer.read(cx).snapshot();
6572 assert_hunks(
6573 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
6574 &snapshot,
6575 &unstaged_diff.base_text().text(),
6576 &[(
6577 2..3,
6578 "",
6579 " println!(\"goodbye world\");\n",
6580 DiffHunkStatus::added_none(),
6581 )],
6582 );
6583 });
6584}
6585
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: HEAD, the index, and the working copy.
    // HEAD -> index stages the println change; index -> disk adds the comment.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index also contain deletion.rs, which is absent on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // Relative to HEAD: the comment line is an unstaged addition (it has a
    // secondary hunk), while the println change is already staged.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is still unstaged, so the hunk carries a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file (drop deletion.rs from the index).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once staged, the deletion hunk loses its secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6765
6766#[gpui::test]
6767async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
6768 use DiffHunkSecondaryStatus::*;
6769 init_test(cx);
6770
6771 let committed_contents = r#"
6772 zero
6773 one
6774 two
6775 three
6776 four
6777 five
6778 "#
6779 .unindent();
6780 let file_contents = r#"
6781 one
6782 TWO
6783 three
6784 FOUR
6785 five
6786 "#
6787 .unindent();
6788
6789 let fs = FakeFs::new(cx.background_executor.clone());
6790 fs.insert_tree(
6791 "/dir",
6792 json!({
6793 ".git": {},
6794 "file.txt": file_contents.clone()
6795 }),
6796 )
6797 .await;
6798
6799 fs.set_head_and_index_for_repo(
6800 "/dir/.git".as_ref(),
6801 &[("file.txt".into(), committed_contents.clone())],
6802 );
6803
6804 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6805
6806 let buffer = project
6807 .update(cx, |project, cx| {
6808 project.open_local_buffer("/dir/file.txt", cx)
6809 })
6810 .await
6811 .unwrap();
6812 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6813 let uncommitted_diff = project
6814 .update(cx, |project, cx| {
6815 project.open_uncommitted_diff(buffer.clone(), cx)
6816 })
6817 .await
6818 .unwrap();
6819 let mut diff_events = cx.events(&uncommitted_diff);
6820
6821 // The hunks are initially unstaged.
6822 uncommitted_diff.read_with(cx, |diff, cx| {
6823 assert_hunks(
6824 diff.hunks(&snapshot, cx),
6825 &snapshot,
6826 &diff.base_text_string().unwrap(),
6827 &[
6828 (
6829 0..0,
6830 "zero\n",
6831 "",
6832 DiffHunkStatus::deleted(HasSecondaryHunk),
6833 ),
6834 (
6835 1..2,
6836 "two\n",
6837 "TWO\n",
6838 DiffHunkStatus::modified(HasSecondaryHunk),
6839 ),
6840 (
6841 3..4,
6842 "four\n",
6843 "FOUR\n",
6844 DiffHunkStatus::modified(HasSecondaryHunk),
6845 ),
6846 ],
6847 );
6848 });
6849
6850 // Stage a hunk. It appears as optimistically staged.
6851 uncommitted_diff.update(cx, |diff, cx| {
6852 let range =
6853 snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
6854 let hunks = diff
6855 .hunks_intersecting_range(range, &snapshot, cx)
6856 .collect::<Vec<_>>();
6857 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
6858
6859 assert_hunks(
6860 diff.hunks(&snapshot, cx),
6861 &snapshot,
6862 &diff.base_text_string().unwrap(),
6863 &[
6864 (
6865 0..0,
6866 "zero\n",
6867 "",
6868 DiffHunkStatus::deleted(HasSecondaryHunk),
6869 ),
6870 (
6871 1..2,
6872 "two\n",
6873 "TWO\n",
6874 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
6875 ),
6876 (
6877 3..4,
6878 "four\n",
6879 "FOUR\n",
6880 DiffHunkStatus::modified(HasSecondaryHunk),
6881 ),
6882 ],
6883 );
6884 });
6885
6886 // The diff emits a change event for the range of the staged hunk.
6887 assert!(matches!(
6888 diff_events.next().await.unwrap(),
6889 BufferDiffEvent::HunksStagedOrUnstaged(_)
6890 ));
6891 let event = diff_events.next().await.unwrap();
6892 if let BufferDiffEvent::DiffChanged {
6893 changed_range: Some(changed_range),
6894 } = event
6895 {
6896 let changed_range = changed_range.to_point(&snapshot);
6897 assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
6898 } else {
6899 panic!("Unexpected event {event:?}");
6900 }
6901
6902 // When the write to the index completes, it appears as staged.
6903 cx.run_until_parked();
6904 uncommitted_diff.update(cx, |diff, cx| {
6905 assert_hunks(
6906 diff.hunks(&snapshot, cx),
6907 &snapshot,
6908 &diff.base_text_string().unwrap(),
6909 &[
6910 (
6911 0..0,
6912 "zero\n",
6913 "",
6914 DiffHunkStatus::deleted(HasSecondaryHunk),
6915 ),
6916 (
6917 1..2,
6918 "two\n",
6919 "TWO\n",
6920 DiffHunkStatus::modified(NoSecondaryHunk),
6921 ),
6922 (
6923 3..4,
6924 "four\n",
6925 "FOUR\n",
6926 DiffHunkStatus::modified(HasSecondaryHunk),
6927 ),
6928 ],
6929 );
6930 });
6931
6932 // The diff emits a change event for the changed index text.
6933 let event = diff_events.next().await.unwrap();
6934 if let BufferDiffEvent::DiffChanged {
6935 changed_range: Some(changed_range),
6936 } = event
6937 {
6938 let changed_range = changed_range.to_point(&snapshot);
6939 assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
6940 } else {
6941 panic!("Unexpected event {event:?}");
6942 }
6943
6944 // Simulate a problem writing to the git index.
6945 fs.set_error_message_for_index_write(
6946 "/dir/.git".as_ref(),
6947 Some("failed to write git index".into()),
6948 );
6949
6950 // Stage another hunk.
6951 uncommitted_diff.update(cx, |diff, cx| {
6952 let range =
6953 snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
6954 let hunks = diff
6955 .hunks_intersecting_range(range, &snapshot, cx)
6956 .collect::<Vec<_>>();
6957 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
6958
6959 assert_hunks(
6960 diff.hunks(&snapshot, cx),
6961 &snapshot,
6962 &diff.base_text_string().unwrap(),
6963 &[
6964 (
6965 0..0,
6966 "zero\n",
6967 "",
6968 DiffHunkStatus::deleted(HasSecondaryHunk),
6969 ),
6970 (
6971 1..2,
6972 "two\n",
6973 "TWO\n",
6974 DiffHunkStatus::modified(NoSecondaryHunk),
6975 ),
6976 (
6977 3..4,
6978 "four\n",
6979 "FOUR\n",
6980 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
6981 ),
6982 ],
6983 );
6984 });
6985 assert!(matches!(
6986 diff_events.next().await.unwrap(),
6987 BufferDiffEvent::HunksStagedOrUnstaged(_)
6988 ));
6989 let event = diff_events.next().await.unwrap();
6990 if let BufferDiffEvent::DiffChanged {
6991 changed_range: Some(changed_range),
6992 } = event
6993 {
6994 let changed_range = changed_range.to_point(&snapshot);
6995 assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
6996 } else {
6997 panic!("Unexpected event {event:?}");
6998 }
6999
7000 // When the write fails, the hunk returns to being unstaged.
7001 cx.run_until_parked();
7002 uncommitted_diff.update(cx, |diff, cx| {
7003 assert_hunks(
7004 diff.hunks(&snapshot, cx),
7005 &snapshot,
7006 &diff.base_text_string().unwrap(),
7007 &[
7008 (
7009 0..0,
7010 "zero\n",
7011 "",
7012 DiffHunkStatus::deleted(HasSecondaryHunk),
7013 ),
7014 (
7015 1..2,
7016 "two\n",
7017 "TWO\n",
7018 DiffHunkStatus::modified(NoSecondaryHunk),
7019 ),
7020 (
7021 3..4,
7022 "four\n",
7023 "FOUR\n",
7024 DiffHunkStatus::modified(HasSecondaryHunk),
7025 ),
7026 ],
7027 );
7028 });
7029
7030 let event = diff_events.next().await.unwrap();
7031 if let BufferDiffEvent::DiffChanged {
7032 changed_range: Some(changed_range),
7033 } = event
7034 {
7035 let changed_range = changed_range.to_point(&snapshot);
7036 assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
7037 } else {
7038 panic!("Unexpected event {event:?}");
7039 }
7040
7041 // Allow writing to the git index to succeed again.
7042 fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);
7043
7044 // Stage two hunks with separate operations.
7045 uncommitted_diff.update(cx, |diff, cx| {
7046 let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
7047 diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
7048 diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
7049 });
7050
7051 // Both staged hunks appear as pending.
7052 uncommitted_diff.update(cx, |diff, cx| {
7053 assert_hunks(
7054 diff.hunks(&snapshot, cx),
7055 &snapshot,
7056 &diff.base_text_string().unwrap(),
7057 &[
7058 (
7059 0..0,
7060 "zero\n",
7061 "",
7062 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7063 ),
7064 (
7065 1..2,
7066 "two\n",
7067 "TWO\n",
7068 DiffHunkStatus::modified(NoSecondaryHunk),
7069 ),
7070 (
7071 3..4,
7072 "four\n",
7073 "FOUR\n",
7074 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7075 ),
7076 ],
7077 );
7078 });
7079
7080 // Both staging operations take effect.
7081 cx.run_until_parked();
7082 uncommitted_diff.update(cx, |diff, cx| {
7083 assert_hunks(
7084 diff.hunks(&snapshot, cx),
7085 &snapshot,
7086 &diff.base_text_string().unwrap(),
7087 &[
7088 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
7089 (
7090 1..2,
7091 "two\n",
7092 "TWO\n",
7093 DiffHunkStatus::modified(NoSecondaryHunk),
7094 ),
7095 (
7096 3..4,
7097 "four\n",
7098 "FOUR\n",
7099 DiffHunkStatus::modified(NoSecondaryHunk),
7100 ),
7101 ],
7102 );
7103 });
7104}
7105
// Staging hunks while file-system events are paused must not lose or revert
// pending index writes once the buffered events are eventually delivered.
// NOTE(review): the seeds appear pinned to reproduce a specific event
// interleaving — confirm before changing them.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and the index both hold the original six lines; the working copy
    // deletes "zero" and modifies "two" and "four", yielding three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. The result is reflected optimistically: the
    // secondary (unstaged) hunk is marked as pending removal.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7299
// Randomized test: repeatedly stage/unstage random hunks with random delays
// between operations, then verify that once all IO settles, every hunk's
// secondary status matches the last operation applied to it.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of stage/unstage operations, overridable via the `OPERATIONS`
    // environment variable.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.gen_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Thirty lines with every fifth line modified in the working copy,
    // producing six modified hunks; the index matches HEAD.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    // Stage or unstage random hunks, mirroring the expected pending status
    // in our local `hunks` copy.
    for _i in 0..operations {
        let hunk_ix = rng.gen_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times so index writes and diff
        // recalculations interleave differently on each iteration.
        for _ in 0..rng.gen_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // All pending operations should have resolved by now.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7418
7419#[gpui::test]
7420async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7421 init_test(cx);
7422
7423 let committed_contents = r#"
7424 fn main() {
7425 println!("hello from HEAD");
7426 }
7427 "#
7428 .unindent();
7429 let file_contents = r#"
7430 fn main() {
7431 println!("hello from the working copy");
7432 }
7433 "#
7434 .unindent();
7435
7436 let fs = FakeFs::new(cx.background_executor.clone());
7437 fs.insert_tree(
7438 "/dir",
7439 json!({
7440 ".git": {},
7441 "src": {
7442 "main.rs": file_contents,
7443 }
7444 }),
7445 )
7446 .await;
7447
7448 fs.set_head_for_repo(
7449 Path::new("/dir/.git"),
7450 &[("src/main.rs".into(), committed_contents.clone())],
7451 "deadbeef",
7452 );
7453 fs.set_index_for_repo(
7454 Path::new("/dir/.git"),
7455 &[("src/main.rs".into(), committed_contents.clone())],
7456 );
7457
7458 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7459
7460 let buffer = project
7461 .update(cx, |project, cx| {
7462 project.open_local_buffer("/dir/src/main.rs", cx)
7463 })
7464 .await
7465 .unwrap();
7466 let uncommitted_diff = project
7467 .update(cx, |project, cx| {
7468 project.open_uncommitted_diff(buffer.clone(), cx)
7469 })
7470 .await
7471 .unwrap();
7472
7473 cx.run_until_parked();
7474 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7475 let snapshot = buffer.read(cx).snapshot();
7476 assert_hunks(
7477 uncommitted_diff.hunks(&snapshot, cx),
7478 &snapshot,
7479 &uncommitted_diff.base_text_string().unwrap(),
7480 &[(
7481 1..2,
7482 " println!(\"hello from HEAD\");\n",
7483 " println!(\"hello from the working copy\");\n",
7484 DiffHunkStatus {
7485 kind: DiffHunkStatusKind::Modified,
7486 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7487 },
7488 )],
7489 );
7490 });
7491}
7492
7493#[gpui::test]
7494async fn test_repository_and_path_for_project_path(
7495 background_executor: BackgroundExecutor,
7496 cx: &mut gpui::TestAppContext,
7497) {
7498 init_test(cx);
7499 let fs = FakeFs::new(background_executor);
7500 fs.insert_tree(
7501 path!("/root"),
7502 json!({
7503 "c.txt": "",
7504 "dir1": {
7505 ".git": {},
7506 "deps": {
7507 "dep1": {
7508 ".git": {},
7509 "src": {
7510 "a.txt": ""
7511 }
7512 }
7513 },
7514 "src": {
7515 "b.txt": ""
7516 }
7517 },
7518 }),
7519 )
7520 .await;
7521
7522 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7523 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7524 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7525 project
7526 .update(cx, |project, cx| project.git_scans_complete(cx))
7527 .await;
7528 cx.run_until_parked();
7529
7530 project.read_with(cx, |project, cx| {
7531 let git_store = project.git_store().read(cx);
7532 let pairs = [
7533 ("c.txt", None),
7534 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
7535 (
7536 "dir1/deps/dep1/src/a.txt",
7537 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
7538 ),
7539 ];
7540 let expected = pairs
7541 .iter()
7542 .map(|(path, result)| {
7543 (
7544 path,
7545 result.map(|(repo, repo_path)| {
7546 (Path::new(repo).into(), RepoPath::from(repo_path))
7547 }),
7548 )
7549 })
7550 .collect::<Vec<_>>();
7551 let actual = pairs
7552 .iter()
7553 .map(|(path, _)| {
7554 let project_path = (tree_id, Path::new(path)).into();
7555 let result = maybe!({
7556 let (repo, repo_path) =
7557 git_store.repository_and_path_for_project_path(&project_path, cx)?;
7558 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
7559 });
7560 (path, result)
7561 })
7562 .collect::<Vec<_>>();
7563 pretty_assertions::assert_eq!(expected, actual);
7564 });
7565
7566 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
7567 .await
7568 .unwrap();
7569 cx.run_until_parked();
7570
7571 project.read_with(cx, |project, cx| {
7572 let git_store = project.git_store().read(cx);
7573 assert_eq!(
7574 git_store.repository_and_path_for_project_path(
7575 &(tree_id, Path::new("dir1/src/b.txt")).into(),
7576 cx
7577 ),
7578 None
7579 );
7580 });
7581}
7582
7583#[gpui::test]
7584async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7585 init_test(cx);
7586 let fs = FakeFs::new(cx.background_executor.clone());
7587 fs.insert_tree(
7588 path!("/root"),
7589 json!({
7590 "home": {
7591 ".git": {},
7592 "project": {
7593 "a.txt": "A"
7594 },
7595 },
7596 }),
7597 )
7598 .await;
7599 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7600
7601 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7602 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7603 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7604
7605 project
7606 .update(cx, |project, cx| project.git_scans_complete(cx))
7607 .await;
7608 tree.flush_fs_events(cx).await;
7609
7610 project.read_with(cx, |project, cx| {
7611 let containing = project
7612 .git_store()
7613 .read(cx)
7614 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7615 assert!(containing.is_none());
7616 });
7617
7618 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7619 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7620 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7621 project
7622 .update(cx, |project, cx| project.git_scans_complete(cx))
7623 .await;
7624 tree.flush_fs_events(cx).await;
7625
7626 project.read_with(cx, |project, cx| {
7627 let containing = project
7628 .git_store()
7629 .read(cx)
7630 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7631 assert_eq!(
7632 containing
7633 .unwrap()
7634 .0
7635 .read(cx)
7636 .work_directory_abs_path
7637 .as_ref(),
7638 Path::new(path!("/root/home"))
7639 );
7640 });
7641}
7642
// Integration test against a real git repository on the real file system:
// cached statuses must track modifications, untracked files, deletions,
// and commits as the working copy evolves.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a file that was previously unchanged; it should now show up as
    // modified in the worktree.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modifications and remove d.txt from the index; the
    // corresponding statuses should clear after the next scan.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7772
// Verifies postprocessing of raw git statuses: index and worktree codes are
// combined into a single entry per path, and nested repositories are
// excluded from the parent repository's status list.
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick out the outer repository (not the nested one in `sub`).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
7835
7836#[gpui::test]
7837async fn test_repository_subfolder_git_status(
7838 executor: gpui::BackgroundExecutor,
7839 cx: &mut gpui::TestAppContext,
7840) {
7841 init_test(cx);
7842
7843 let fs = FakeFs::new(executor);
7844 fs.insert_tree(
7845 path!("/root"),
7846 json!({
7847 "my-repo": {
7848 ".git": {},
7849 "a.txt": "a",
7850 "sub-folder-1": {
7851 "sub-folder-2": {
7852 "c.txt": "cc",
7853 "d": {
7854 "e.txt": "eee"
7855 }
7856 },
7857 }
7858 },
7859 }),
7860 )
7861 .await;
7862
7863 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
7864 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
7865
7866 fs.set_status_for_repo(
7867 path!("/root/my-repo/.git").as_ref(),
7868 &[(E_TXT.as_ref(), FileStatus::Untracked)],
7869 );
7870
7871 let project = Project::test(
7872 fs.clone(),
7873 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
7874 cx,
7875 )
7876 .await;
7877
7878 project
7879 .update(cx, |project, cx| project.git_scans_complete(cx))
7880 .await;
7881 cx.run_until_parked();
7882
7883 let repository = project.read_with(cx, |project, cx| {
7884 project.repositories(cx).values().next().unwrap().clone()
7885 });
7886
7887 // Ensure that the git status is loaded correctly
7888 repository.read_with(cx, |repository, _cx| {
7889 assert_eq!(
7890 repository.work_directory_abs_path,
7891 Path::new(path!("/root/my-repo")).into()
7892 );
7893
7894 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
7895 assert_eq!(
7896 repository.status_for_path(&E_TXT.into()).unwrap().status,
7897 FileStatus::Untracked
7898 );
7899 });
7900
7901 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
7902 project
7903 .update(cx, |project, cx| project.git_scans_complete(cx))
7904 .await;
7905 cx.run_until_parked();
7906
7907 repository.read_with(cx, |repository, _cx| {
7908 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
7909 assert_eq!(repository.status_for_path(&E_TXT.into()), None);
7910 });
7911}
7912
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// `#[cfg(any())]` is never true, so this test is currently compiled out
// until it is deflaked.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Commit a conflicting change to a.txt on another branch, then
    // cherry-pick it onto main so that a.txt becomes conflicted.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository should report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once CHERRY_PICK_HEAD is gone, no conflicts remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
7995
// Verifies that file ignore states and git statuses are re-derived when the
// repository's .gitignore file is rewritten on disk.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index agree with the working copy for the two tracked files;
    // b.txt starts out ignored by the "*.txt" rule.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // The ignore states are now flipped: a.xml is ignored, and b.txt shows up
    // in the index as newly added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8063
8064// NOTE:
8065// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
8066// a directory which some program has already open.
8067// This is a limitation of the Windows.
8068// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// Verifies that a repository's work-directory path is updated and its file
// statuses are preserved when the work directory is renamed on disk.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit only "a", then modify it so it reads as modified; "b" is never
    // added and should remain untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Sanity-check the initial state before the rename.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should now report the new path with the same statuses.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8144
8145// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
8146// you can't rename a directory which some program has already open. This is a
8147// limitation of the Windows. See:
8148// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// End-to-end exercise of git status tracking against a real repository:
// modifications, commits, resets, stashes, ignore rules, deletions, and
// directory renames should all be reflected in the observed statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    // Committed files (a.txt, b.txt) now report no status at all.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules to also cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Create an untracked file inside a fresh nested directory.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the top-level directory; the untracked status should follow the
    // file to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8346
// Verifies that adding an invisible (non-project) worktree does not cause
// additional repositories to be registered on the project.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Only dep1 is opened as a visible worktree, so only its repository
    // should be discovered (not the enclosing dir1 repository).
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Open a single file from dir1 as an invisible worktree.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list must be unchanged: invisible worktrees contribute
    // no repositories.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8408
// Verifies git/ignore state for files covered by the repository's own
// .gitignore as well as a .gitignore in an ancestor directory, both before
// and after new files are created and staged.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so every entry (including ignored ones)
    // can be observed in the worktree.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded so they can be
    // asserted on below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    // Note: the ancestor .gitignore lives outside the repository, so
    // "ancestor-ignored-file1" is not marked ignored here.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files in each category; stage only tracked-file2.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8544
// Verifies that linked git worktrees (".git" file pointing at
// ".git/worktrees/...") and submodules (".git" file pointing at
// ".git/modules/...") are each discovered as distinct repositories, and that
// git-state changes in them are picked up and reflected in buffer statuses.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be discovered: the main one, the linked
    // worktree, and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer should resolve to the linked worktree's repository, not the
    // main one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
8694
// Verifies that two project worktrees living inside the same git repository
// are deduplicated into a single repository entry.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open two sibling directories of the same repository as separate
    // worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository should be reported for both worktrees.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
8741
8742async fn search(
8743 project: &Entity<Project>,
8744 query: SearchQuery,
8745 cx: &mut gpui::TestAppContext,
8746) -> Result<HashMap<String, Vec<Range<usize>>>> {
8747 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8748 let mut results = HashMap::default();
8749 while let Ok(search_result) = search_rx.recv().await {
8750 match search_result {
8751 SearchResult::Buffer { buffer, ranges } => {
8752 results.entry(buffer).or_insert(ranges);
8753 }
8754 SearchResult::LimitReached => {}
8755 }
8756 }
8757 Ok(results
8758 .into_iter()
8759 .map(|(buffer, ranges)| {
8760 buffer.update(cx, |buffer, cx| {
8761 let path = buffer
8762 .file()
8763 .unwrap()
8764 .full_path(cx)
8765 .to_string_lossy()
8766 .to_string();
8767 let ranges = ranges
8768 .into_iter()
8769 .map(|range| range.to_offset(buffer))
8770 .collect::<Vec<_>>();
8771 (path, ranges)
8772 })
8773 })
8774 .collect())
8775}
8776
/// Shared test setup: installs a test `SettingsStore` and initializes the
/// release-channel, language, and project-settings systems. Must run before
/// constructing a `Project` in any test in this module.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        // Order matters: the settings store must be in place before the other
        // subsystems read settings during their own init.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
8788
8789fn json_lang() -> Arc<Language> {
8790 Arc::new(Language::new(
8791 LanguageConfig {
8792 name: "JSON".into(),
8793 matcher: LanguageMatcher {
8794 path_suffixes: vec!["json".to_string()],
8795 ..Default::default()
8796 },
8797 ..Default::default()
8798 },
8799 None,
8800 ))
8801}
8802
8803fn js_lang() -> Arc<Language> {
8804 Arc::new(Language::new(
8805 LanguageConfig {
8806 name: "JavaScript".into(),
8807 matcher: LanguageMatcher {
8808 path_suffixes: vec!["js".to_string()],
8809 ..Default::default()
8810 },
8811 ..Default::default()
8812 },
8813 None,
8814 ))
8815}
8816
8817fn rust_lang() -> Arc<Language> {
8818 Arc::new(Language::new(
8819 LanguageConfig {
8820 name: "Rust".into(),
8821 matcher: LanguageMatcher {
8822 path_suffixes: vec!["rs".to_string()],
8823 ..Default::default()
8824 },
8825 ..Default::default()
8826 },
8827 Some(tree_sitter_rust::LANGUAGE.into()),
8828 ))
8829}
8830
8831fn typescript_lang() -> Arc<Language> {
8832 Arc::new(Language::new(
8833 LanguageConfig {
8834 name: "TypeScript".into(),
8835 matcher: LanguageMatcher {
8836 path_suffixes: vec!["ts".to_string()],
8837 ..Default::default()
8838 },
8839 ..Default::default()
8840 },
8841 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8842 ))
8843}
8844
8845fn tsx_lang() -> Arc<Language> {
8846 Arc::new(Language::new(
8847 LanguageConfig {
8848 name: "tsx".into(),
8849 matcher: LanguageMatcher {
8850 path_suffixes: vec!["tsx".to_string()],
8851 ..Default::default()
8852 },
8853 ..Default::default()
8854 },
8855 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8856 ))
8857}
8858
// Resolves tasks from the project's task inventory for the given contexts and
// returns them as one list: the first tuple element of
// `used_and_current_resolved_tasks` (presumably previously-used tasks — the
// inventory API defines the exact split) followed by the second.
fn get_all_tasks(
    project: &Entity<Project>,
    task_contexts: Arc<TaskContexts>,
    cx: &mut App,
) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
    let new_tasks = project.update(cx, |project, cx| {
        project.task_store.update(cx, |task_store, cx| {
            task_store.task_inventory().unwrap().update(cx, |this, cx| {
                this.used_and_current_resolved_tasks(task_contexts, cx)
            })
        })
    });

    // Await resolution and concatenate the two lists off the main thread.
    cx.background_spawn(async move {
        let (mut old, new) = new_tasks.await;
        old.extend(new);
        old
    })
}
8878
8879#[track_caller]
8880fn assert_entry_git_state(
8881 tree: &Worktree,
8882 repository: &Repository,
8883 path: &str,
8884 index_status: Option<StatusCode>,
8885 is_ignored: bool,
8886) {
8887 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
8888 let entry = tree
8889 .entry_for_path(path)
8890 .unwrap_or_else(|| panic!("entry {path} not found"));
8891 let status = repository
8892 .status_for_path(&path.into())
8893 .map(|entry| entry.status);
8894 let expected = index_status.map(|index_status| {
8895 TrackedStatus {
8896 index_status,
8897 worktree_status: StatusCode::Unmodified,
8898 }
8899 .into()
8900 });
8901 assert_eq!(
8902 status, expected,
8903 "expected {path} to have git status: {expected:?}"
8904 );
8905 assert_eq!(
8906 entry.is_ignored, is_ignored,
8907 "expected {path} to have is_ignored: {is_ignored}"
8908 );
8909}
8910
8911#[track_caller]
8912fn git_init(path: &Path) -> git2::Repository {
8913 let mut init_opts = RepositoryInitOptions::new();
8914 init_opts.initial_head("main");
8915 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
8916}
8917
8918#[track_caller]
8919fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
8920 let path = path.as_ref();
8921 let mut index = repo.index().expect("Failed to get index");
8922 index.add_path(path).expect("Failed to add file");
8923 index.write().expect("Failed to write index");
8924}
8925
8926#[track_caller]
8927fn git_remove_index(path: &Path, repo: &git2::Repository) {
8928 let mut index = repo.index().expect("Failed to get index");
8929 index.remove_path(path).expect("Failed to add file");
8930 index.write().expect("Failed to write index");
8931}
8932
8933#[track_caller]
8934fn git_commit(msg: &'static str, repo: &git2::Repository) {
8935 use git2::Signature;
8936
8937 let signature = Signature::now("test", "test@zed.dev").unwrap();
8938 let oid = repo.index().unwrap().write_tree().unwrap();
8939 let tree = repo.find_tree(oid).unwrap();
8940 if let Ok(head) = repo.head() {
8941 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
8942
8943 let parent_commit = parent_obj.as_commit().unwrap();
8944
8945 repo.commit(
8946 Some("HEAD"),
8947 &signature,
8948 &signature,
8949 msg,
8950 &tree,
8951 &[parent_commit],
8952 )
8953 .expect("Failed to commit with parent");
8954 } else {
8955 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
8956 .expect("Failed to commit");
8957 }
8958}
8959
// Cherry-picks `commit` onto the current HEAD. Compiled out via
// `#[cfg(any())]`; kept around for ad-hoc debugging of git tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
8965
8966#[track_caller]
8967fn git_stash(repo: &mut git2::Repository) {
8968 use git2::Signature;
8969
8970 let signature = Signature::now("test", "test@zed.dev").unwrap();
8971 repo.stash_save(&signature, "N/A", None)
8972 .expect("Failed to stash");
8973}
8974
8975#[track_caller]
8976fn git_reset(offset: usize, repo: &git2::Repository) {
8977 let head = repo.head().expect("Couldn't get repo head");
8978 let object = head.peel(git2::ObjectType::Commit).unwrap();
8979 let commit = object.as_commit().unwrap();
8980 let new_head = commit
8981 .parents()
8982 .inspect(|parnet| {
8983 parnet.message();
8984 })
8985 .nth(offset)
8986 .expect("Not enough history");
8987 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
8988 .expect("Could not reset");
8989}
8990
// Creates branch `name` at the current HEAD commit, mirroring
// `git branch <name>`. Compiled out via `#[cfg(any())]`; kept around for
// ad-hoc debugging of git tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed expect message: the failing operation is branch creation, not a commit.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9001
// Points HEAD at `name` and syncs the working tree, mirroring `git checkout`.
// Compiled out via `#[cfg(any())]`; kept around for ad-hoc debugging of git
// tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9008
// Snapshots the repository's status list as a path -> status map. Compiled
// out via `#[cfg(any())]`; kept around for ad-hoc debugging of git tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}
9018
// Verifies `Project::find_project_path` resolution for absolute paths:
// existing files, nested files, files in a second worktree, nonexistent files
// under a worktree, and paths outside any worktree.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute path and id for the assertions below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // A path under a worktree resolves even if no such file exists yet.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}