1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
9 DiffHunkStatusKind, assert_hunks,
10};
11use fs::FakeFs;
12use futures::{StreamExt, future};
13use git::{
14 GitHostingProviderRegistry,
15 repository::RepoPath,
16 status::{StatusCode, TrackedStatus},
17};
18use git2::RepositoryInitOptions;
19use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
20use http_client::Url;
21use language::{
22 Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
23 LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
24 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
25 tree_sitter_rust, tree_sitter_typescript,
26};
27use lsp::{
28 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
29 WillRenameFiles, notification::DidRenameFiles,
30};
31use parking_lot::Mutex;
32use paths::{config_dir, tasks_file};
33use postage::stream::Stream as _;
34use pretty_assertions::{assert_eq, assert_matches};
35use rand::{Rng as _, rngs::StdRng};
36use serde_json::json;
37#[cfg(not(windows))]
38use std::os;
39use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
40use task::{ResolvedTask, TaskContext};
41use unindent::Unindent as _;
42use util::{
43 TryFutureExt as _, assert_set_eq, maybe, path,
44 paths::PathMatcher,
45 separator,
46 test::{TempTree, marked_text_offsets},
47 uri,
48};
49use worktree::WorktreeModelHandle as _;
50
51#[gpui::test]
52async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
53 cx.executor().allow_parking();
54
55 let (tx, mut rx) = futures::channel::mpsc::unbounded();
56 let _thread = std::thread::spawn(move || {
57 #[cfg(not(target_os = "windows"))]
58 std::fs::metadata("/tmp").unwrap();
59 #[cfg(target_os = "windows")]
60 std::fs::metadata("C:/Windows").unwrap();
61 std::thread::sleep(Duration::from_millis(1000));
62 tx.unbounded_send(1).unwrap();
63 });
64 rx.next().await.unwrap();
65}
66
67#[gpui::test]
68async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
69 cx.executor().allow_parking();
70
71 let io_task = smol::unblock(move || {
72 println!("sleeping on thread {:?}", std::thread::current().id());
73 std::thread::sleep(Duration::from_millis(10));
74 1
75 });
76
77 let task = cx.foreground_executor().spawn(async move {
78 io_task.await;
79 });
80
81 task.await;
82}
83
84#[cfg(not(windows))]
85#[gpui::test]
86async fn test_symlinks(cx: &mut gpui::TestAppContext) {
87 init_test(cx);
88 cx.executor().allow_parking();
89
90 let dir = TempTree::new(json!({
91 "root": {
92 "apple": "",
93 "banana": {
94 "carrot": {
95 "date": "",
96 "endive": "",
97 }
98 },
99 "fennel": {
100 "grape": "",
101 }
102 }
103 }));
104
105 let root_link_path = dir.path().join("root_link");
106 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
107 os::unix::fs::symlink(
108 dir.path().join("root/fennel"),
109 dir.path().join("root/finnochio"),
110 )
111 .unwrap();
112
113 let project = Project::test(
114 Arc::new(RealFs::new(None, cx.executor())),
115 [root_link_path.as_ref()],
116 cx,
117 )
118 .await;
119
120 project.update(cx, |project, cx| {
121 let tree = project.worktrees(cx).next().unwrap().read(cx);
122 assert_eq!(tree.file_count(), 5);
123 assert_eq!(
124 tree.inode_for_path("fennel/grape"),
125 tree.inode_for_path("finnochio/grape")
126 );
127 });
128}
129
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies how `.editorconfig` files interact with `.zed/settings.json`:
    // editorconfig values take precedence over Zed settings, nested
    // `.editorconfig` files override outer ones, and files not matched by any
    // editorconfig section fall back to the Zed settings.
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the on-disk tree into a FakeFs so the project watches it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path,
        // detecting the file's language from its extension.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings.json: tab size 3 (not 8),
        // hard tabs on (not off), and the on-save options flipped back on.
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // The .editorconfig in b/ overrides the one in the root for *.rs.
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // For *.js, "indent_size" is not set, so "tab_width" is used instead.
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json matches no .editorconfig glob ("*.rs"/"*.js"), so the
        // .zed/settings.json tab size of 8 applies.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
219
220#[gpui::test]
221async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
222 init_test(cx);
223 cx.update(|cx| {
224 GitHostingProviderRegistry::default_global(cx);
225 git_hosting_providers::init(cx);
226 });
227
228 let fs = FakeFs::new(cx.executor());
229 let str_path = path!("/dir");
230 let path = Path::new(str_path);
231
232 fs.insert_tree(
233 path!("/dir"),
234 json!({
235 ".zed": {
236 "settings.json": r#"{
237 "git_hosting_providers": [
238 {
239 "provider": "gitlab",
240 "base_url": "https://google.com",
241 "name": "foo"
242 }
243 ]
244 }"#
245 },
246 }),
247 )
248 .await;
249
250 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
251 let (_worktree, _) =
252 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
253 cx.executor().run_until_parked();
254
255 cx.update(|cx| {
256 let provider = GitHostingProviderRegistry::global(cx);
257 assert!(
258 provider
259 .list_hosting_providers()
260 .into_iter()
261 .any(|provider| provider.name() == "foo")
262 );
263 });
264
265 fs.atomic_write(
266 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
267 "{}".into(),
268 )
269 .await
270 .unwrap();
271
272 cx.run_until_parked();
273
274 cx.update(|cx| {
275 let provider = GitHostingProviderRegistry::global(cx);
276 assert!(
277 !provider
278 .list_hosting_providers()
279 .into_iter()
280 .any(|provider| provider.name() == "foo")
281 );
282 });
283}
284
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies that `.zed/settings.json` and `.zed/tasks.json` are discovered
    // per directory, that nested settings override outer ones, and that
    // global file-based tasks are merged in alongside worktree tasks.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks with the root worktree as the active worktree context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for tasks defined in the root-level `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // The nearest `.zed/settings.json` wins: root settings give a/a.rs
            // a tab size of 8, while b/.zed overrides it to 2 for b/b.rs.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        // Project each task down to the fields the assertions care about.
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files are picked up, with the nested b/.zed task
    // listed before the root one.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root `.zed` task as recently scheduled, and install a global
    // tasks.json with an additional task carrying an env var.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now comes first, followed by the remaining
    // worktree task, and the global task (with its env var) comes last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
493
494#[gpui::test]
495async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
496 init_test(cx);
497 TaskStore::init(None);
498
499 let fs = FakeFs::new(cx.executor());
500 fs.insert_tree(
501 path!("/dir"),
502 json!({
503 ".zed": {
504 "tasks.json": r#"[{
505 "label": "test worktree root",
506 "command": "echo $ZED_WORKTREE_ROOT"
507 }]"#,
508 },
509 "a": {
510 "a.rs": "fn a() {\n A\n}"
511 },
512 }),
513 )
514 .await;
515
516 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
517 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
518
519 cx.executor().run_until_parked();
520 let worktree_id = cx.update(|cx| {
521 project.update(cx, |project, cx| {
522 project.worktrees(cx).next().unwrap().read(cx).id()
523 })
524 });
525
526 let active_non_worktree_item_tasks = cx
527 .update(|cx| {
528 get_all_tasks(
529 &project,
530 Arc::new(TaskContexts {
531 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
532 active_worktree_context: None,
533 other_worktree_contexts: Vec::new(),
534 lsp_task_sources: HashMap::default(),
535 latest_selection: None,
536 }),
537 cx,
538 )
539 })
540 .await;
541 assert!(
542 active_non_worktree_item_tasks.is_empty(),
543 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
544 );
545
546 let active_worktree_tasks = cx
547 .update(|cx| {
548 get_all_tasks(
549 &project,
550 Arc::new(TaskContexts {
551 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
552 active_worktree_context: Some((worktree_id, {
553 let mut worktree_context = TaskContext::default();
554 worktree_context
555 .task_variables
556 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
557 worktree_context
558 })),
559 other_worktree_contexts: Vec::new(),
560 lsp_task_sources: HashMap::default(),
561 latest_selection: None,
562 }),
563 cx,
564 )
565 })
566 .await;
567 assert_eq!(
568 active_worktree_tasks
569 .into_iter()
570 .map(|(source_kind, task)| {
571 let resolved = task.resolved;
572 (source_kind, resolved.command)
573 })
574 .collect::<Vec<_>>(),
575 vec![(
576 TaskSourceKind::Worktree {
577 id: worktree_id,
578 directory_in_worktree: PathBuf::from(separator!(".zed")),
579 id_base: if cfg!(windows) {
580 "local worktree tasks from directory \".zed\"".into()
581 } else {
582 "local worktree tasks from directory \".zed\"".into()
583 },
584 },
585 "echo /dir".to_string(),
586 )]
587 );
588}
589
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end check of language-server lifecycle management: servers start
    // lazily per language, buffers are routed only to matching servers, file
    // renames re-home buffers between servers, and restarts reopen documents.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON servers with distinct completion triggers so
    // we can tell which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        // The language is not assigned yet because it isn't registered.
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        // No server is associated with TOML, so no triggers were set.
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    // The TOML edit produces no notification; the Rust edit goes to the Rust server.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    // Same extension: the Rust server sees a close of the old path and an open
    // of the new one.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the new ones start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (reopen order is unspecified, hence the set comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    // Dropping the open-buffer handle closes the document on the JSON server.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
991
992#[gpui::test]
993async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
994 init_test(cx);
995
996 let fs = FakeFs::new(cx.executor());
997 fs.insert_tree(
998 path!("/the-root"),
999 json!({
1000 ".gitignore": "target\n",
1001 "Cargo.lock": "",
1002 "src": {
1003 "a.rs": "",
1004 "b.rs": "",
1005 },
1006 "target": {
1007 "x": {
1008 "out": {
1009 "x.rs": ""
1010 }
1011 },
1012 "y": {
1013 "out": {
1014 "y.rs": "",
1015 }
1016 },
1017 "z": {
1018 "out": {
1019 "z.rs": ""
1020 }
1021 }
1022 }
1023 }),
1024 )
1025 .await;
1026 fs.insert_tree(
1027 path!("/the-registry"),
1028 json!({
1029 "dep1": {
1030 "src": {
1031 "dep1.rs": "",
1032 }
1033 },
1034 "dep2": {
1035 "src": {
1036 "dep2.rs": "",
1037 }
1038 },
1039 }),
1040 )
1041 .await;
1042 fs.insert_tree(
1043 path!("/the/stdlib"),
1044 json!({
1045 "LICENSE": "",
1046 "src": {
1047 "string.rs": "",
1048 }
1049 }),
1050 )
1051 .await;
1052
1053 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1054 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1055 (project.languages().clone(), project.lsp_store())
1056 });
1057 language_registry.add(rust_lang());
1058 let mut fake_servers = language_registry.register_fake_lsp(
1059 "Rust",
1060 FakeLspAdapter {
1061 name: "the-language-server",
1062 ..Default::default()
1063 },
1064 );
1065
1066 cx.executor().run_until_parked();
1067
1068 // Start the language server by opening a buffer with a compatible file extension.
1069 project
1070 .update(cx, |project, cx| {
1071 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1072 })
1073 .await
1074 .unwrap();
1075
1076 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1077 project.update(cx, |project, cx| {
1078 let worktree = project.worktrees(cx).next().unwrap();
1079 assert_eq!(
1080 worktree
1081 .read(cx)
1082 .snapshot()
1083 .entries(true, 0)
1084 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1085 .collect::<Vec<_>>(),
1086 &[
1087 (Path::new(""), false),
1088 (Path::new(".gitignore"), false),
1089 (Path::new("Cargo.lock"), false),
1090 (Path::new("src"), false),
1091 (Path::new("src/a.rs"), false),
1092 (Path::new("src/b.rs"), false),
1093 (Path::new("target"), true),
1094 ]
1095 );
1096 });
1097
1098 let prev_read_dir_count = fs.read_dir_call_count();
1099
1100 let fake_server = fake_servers.next().await.unwrap();
1101 let (server_id, server_name) = lsp_store.read_with(cx, |lsp_store, _| {
1102 let (id, status) = lsp_store.language_server_statuses().next().unwrap();
1103 (id, LanguageServerName::from(status.name.as_str()))
1104 });
1105
1106 // Simulate jumping to a definition in a dependency outside of the worktree.
1107 let _out_of_worktree_buffer = project
1108 .update(cx, |project, cx| {
1109 project.open_local_buffer_via_lsp(
1110 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1111 server_id,
1112 server_name.clone(),
1113 cx,
1114 )
1115 })
1116 .await
1117 .unwrap();
1118
1119 // Keep track of the FS events reported to the language server.
1120 let file_changes = Arc::new(Mutex::new(Vec::new()));
1121 fake_server
1122 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1123 registrations: vec![lsp::Registration {
1124 id: Default::default(),
1125 method: "workspace/didChangeWatchedFiles".to_string(),
1126 register_options: serde_json::to_value(
1127 lsp::DidChangeWatchedFilesRegistrationOptions {
1128 watchers: vec![
1129 lsp::FileSystemWatcher {
1130 glob_pattern: lsp::GlobPattern::String(
1131 path!("/the-root/Cargo.toml").to_string(),
1132 ),
1133 kind: None,
1134 },
1135 lsp::FileSystemWatcher {
1136 glob_pattern: lsp::GlobPattern::String(
1137 path!("/the-root/src/*.{rs,c}").to_string(),
1138 ),
1139 kind: None,
1140 },
1141 lsp::FileSystemWatcher {
1142 glob_pattern: lsp::GlobPattern::String(
1143 path!("/the-root/target/y/**/*.rs").to_string(),
1144 ),
1145 kind: None,
1146 },
1147 lsp::FileSystemWatcher {
1148 glob_pattern: lsp::GlobPattern::String(
1149 path!("/the/stdlib/src/**/*.rs").to_string(),
1150 ),
1151 kind: None,
1152 },
1153 lsp::FileSystemWatcher {
1154 glob_pattern: lsp::GlobPattern::String(
1155 path!("**/Cargo.lock").to_string(),
1156 ),
1157 kind: None,
1158 },
1159 ],
1160 },
1161 )
1162 .ok(),
1163 }],
1164 })
1165 .await
1166 .into_response()
1167 .unwrap();
1168 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1169 let file_changes = file_changes.clone();
1170 move |params, _| {
1171 let mut file_changes = file_changes.lock();
1172 file_changes.extend(params.changes);
1173 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1174 }
1175 });
1176
1177 cx.executor().run_until_parked();
1178 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1179 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1180
1181 let mut new_watched_paths = fs.watched_paths();
1182 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1183 assert_eq!(
1184 &new_watched_paths,
1185 &[
1186 Path::new(path!("/the-root")),
1187 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1188 Path::new(path!("/the/stdlib/src"))
1189 ]
1190 );
1191
1192 // Now the language server has asked us to watch an ignored directory path,
1193 // so we recursively load it.
1194 project.update(cx, |project, cx| {
1195 let worktree = project.visible_worktrees(cx).next().unwrap();
1196 assert_eq!(
1197 worktree
1198 .read(cx)
1199 .snapshot()
1200 .entries(true, 0)
1201 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1202 .collect::<Vec<_>>(),
1203 &[
1204 (Path::new(""), false),
1205 (Path::new(".gitignore"), false),
1206 (Path::new("Cargo.lock"), false),
1207 (Path::new("src"), false),
1208 (Path::new("src/a.rs"), false),
1209 (Path::new("src/b.rs"), false),
1210 (Path::new("target"), true),
1211 (Path::new("target/x"), true),
1212 (Path::new("target/y"), true),
1213 (Path::new("target/y/out"), true),
1214 (Path::new("target/y/out/y.rs"), true),
1215 (Path::new("target/z"), true),
1216 ]
1217 );
1218 });
1219
1220 // Perform some file system mutations, two of which match the watched patterns,
1221 // and one of which does not.
1222 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1223 .await
1224 .unwrap();
1225 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1226 .await
1227 .unwrap();
1228 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1229 .await
1230 .unwrap();
1231 fs.create_file(
1232 path!("/the-root/target/x/out/x2.rs").as_ref(),
1233 Default::default(),
1234 )
1235 .await
1236 .unwrap();
1237 fs.create_file(
1238 path!("/the-root/target/y/out/y2.rs").as_ref(),
1239 Default::default(),
1240 )
1241 .await
1242 .unwrap();
1243 fs.save(
1244 path!("/the-root/Cargo.lock").as_ref(),
1245 &"".into(),
1246 Default::default(),
1247 )
1248 .await
1249 .unwrap();
1250 fs.save(
1251 path!("/the-stdlib/LICENSE").as_ref(),
1252 &"".into(),
1253 Default::default(),
1254 )
1255 .await
1256 .unwrap();
1257 fs.save(
1258 path!("/the/stdlib/src/string.rs").as_ref(),
1259 &"".into(),
1260 Default::default(),
1261 )
1262 .await
1263 .unwrap();
1264
1265 // The language server receives events for the FS mutations that match its watch patterns.
1266 cx.executor().run_until_parked();
1267 assert_eq!(
1268 &*file_changes.lock(),
1269 &[
1270 lsp::FileEvent {
1271 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1272 typ: lsp::FileChangeType::CHANGED,
1273 },
1274 lsp::FileEvent {
1275 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1276 typ: lsp::FileChangeType::DELETED,
1277 },
1278 lsp::FileEvent {
1279 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1280 typ: lsp::FileChangeType::CREATED,
1281 },
1282 lsp::FileEvent {
1283 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1284 typ: lsp::FileChangeType::CREATED,
1285 },
1286 lsp::FileEvent {
1287 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1288 typ: lsp::FileChangeType::CHANGED,
1289 },
1290 ]
1291 );
1292}
1293
1294#[gpui::test]
1295async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1296 init_test(cx);
1297
1298 let fs = FakeFs::new(cx.executor());
1299 fs.insert_tree(
1300 path!("/dir"),
1301 json!({
1302 "a.rs": "let a = 1;",
1303 "b.rs": "let b = 2;"
1304 }),
1305 )
1306 .await;
1307
1308 let project = Project::test(
1309 fs,
1310 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1311 cx,
1312 )
1313 .await;
1314 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1315
1316 let buffer_a = project
1317 .update(cx, |project, cx| {
1318 project.open_local_buffer(path!("/dir/a.rs"), cx)
1319 })
1320 .await
1321 .unwrap();
1322 let buffer_b = project
1323 .update(cx, |project, cx| {
1324 project.open_local_buffer(path!("/dir/b.rs"), cx)
1325 })
1326 .await
1327 .unwrap();
1328
1329 lsp_store.update(cx, |lsp_store, cx| {
1330 lsp_store
1331 .update_diagnostics(
1332 LanguageServerId(0),
1333 lsp::PublishDiagnosticsParams {
1334 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1335 version: None,
1336 diagnostics: vec![lsp::Diagnostic {
1337 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1338 severity: Some(lsp::DiagnosticSeverity::ERROR),
1339 message: "error 1".to_string(),
1340 ..Default::default()
1341 }],
1342 },
1343 None,
1344 DiagnosticSourceKind::Pushed,
1345 &[],
1346 cx,
1347 )
1348 .unwrap();
1349 lsp_store
1350 .update_diagnostics(
1351 LanguageServerId(0),
1352 lsp::PublishDiagnosticsParams {
1353 uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
1354 version: None,
1355 diagnostics: vec![lsp::Diagnostic {
1356 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1357 severity: Some(DiagnosticSeverity::WARNING),
1358 message: "error 2".to_string(),
1359 ..Default::default()
1360 }],
1361 },
1362 None,
1363 DiagnosticSourceKind::Pushed,
1364 &[],
1365 cx,
1366 )
1367 .unwrap();
1368 });
1369
1370 buffer_a.update(cx, |buffer, _| {
1371 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1372 assert_eq!(
1373 chunks
1374 .iter()
1375 .map(|(s, d)| (s.as_str(), *d))
1376 .collect::<Vec<_>>(),
1377 &[
1378 ("let ", None),
1379 ("a", Some(DiagnosticSeverity::ERROR)),
1380 (" = 1;", None),
1381 ]
1382 );
1383 });
1384 buffer_b.update(cx, |buffer, _| {
1385 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1386 assert_eq!(
1387 chunks
1388 .iter()
1389 .map(|(s, d)| (s.as_str(), *d))
1390 .collect::<Vec<_>>(),
1391 &[
1392 ("let ", None),
1393 ("b", Some(DiagnosticSeverity::WARNING)),
1394 (" = 2;", None),
1395 ]
1396 );
1397 });
1398}
1399
1400#[gpui::test]
1401async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1402 init_test(cx);
1403
1404 let fs = FakeFs::new(cx.executor());
1405 fs.insert_tree(
1406 path!("/root"),
1407 json!({
1408 "dir": {
1409 ".git": {
1410 "HEAD": "ref: refs/heads/main",
1411 },
1412 ".gitignore": "b.rs",
1413 "a.rs": "let a = 1;",
1414 "b.rs": "let b = 2;",
1415 },
1416 "other.rs": "let b = c;"
1417 }),
1418 )
1419 .await;
1420
1421 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1422 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1423 let (worktree, _) = project
1424 .update(cx, |project, cx| {
1425 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1426 })
1427 .await
1428 .unwrap();
1429 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1430
1431 let (worktree, _) = project
1432 .update(cx, |project, cx| {
1433 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1434 })
1435 .await
1436 .unwrap();
1437 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1438
1439 let server_id = LanguageServerId(0);
1440 lsp_store.update(cx, |lsp_store, cx| {
1441 lsp_store
1442 .update_diagnostics(
1443 server_id,
1444 lsp::PublishDiagnosticsParams {
1445 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1446 version: None,
1447 diagnostics: vec![lsp::Diagnostic {
1448 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1449 severity: Some(lsp::DiagnosticSeverity::ERROR),
1450 message: "unused variable 'b'".to_string(),
1451 ..Default::default()
1452 }],
1453 },
1454 None,
1455 DiagnosticSourceKind::Pushed,
1456 &[],
1457 cx,
1458 )
1459 .unwrap();
1460 lsp_store
1461 .update_diagnostics(
1462 server_id,
1463 lsp::PublishDiagnosticsParams {
1464 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1465 version: None,
1466 diagnostics: vec![lsp::Diagnostic {
1467 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1468 severity: Some(lsp::DiagnosticSeverity::ERROR),
1469 message: "unknown variable 'c'".to_string(),
1470 ..Default::default()
1471 }],
1472 },
1473 None,
1474 DiagnosticSourceKind::Pushed,
1475 &[],
1476 cx,
1477 )
1478 .unwrap();
1479 });
1480
1481 let main_ignored_buffer = project
1482 .update(cx, |project, cx| {
1483 project.open_buffer((main_worktree_id, "b.rs"), cx)
1484 })
1485 .await
1486 .unwrap();
1487 main_ignored_buffer.update(cx, |buffer, _| {
1488 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1489 assert_eq!(
1490 chunks
1491 .iter()
1492 .map(|(s, d)| (s.as_str(), *d))
1493 .collect::<Vec<_>>(),
1494 &[
1495 ("let ", None),
1496 ("b", Some(DiagnosticSeverity::ERROR)),
1497 (" = 2;", None),
1498 ],
1499 "Gigitnored buffers should still get in-buffer diagnostics",
1500 );
1501 });
1502 let other_buffer = project
1503 .update(cx, |project, cx| {
1504 project.open_buffer((other_worktree_id, ""), cx)
1505 })
1506 .await
1507 .unwrap();
1508 other_buffer.update(cx, |buffer, _| {
1509 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1510 assert_eq!(
1511 chunks
1512 .iter()
1513 .map(|(s, d)| (s.as_str(), *d))
1514 .collect::<Vec<_>>(),
1515 &[
1516 ("let b = ", None),
1517 ("c", Some(DiagnosticSeverity::ERROR)),
1518 (";", None),
1519 ],
1520 "Buffers from hidden projects should still get in-buffer diagnostics"
1521 );
1522 });
1523
1524 project.update(cx, |project, cx| {
1525 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1526 assert_eq!(
1527 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1528 vec![(
1529 ProjectPath {
1530 worktree_id: main_worktree_id,
1531 path: Arc::from(Path::new("b.rs")),
1532 },
1533 server_id,
1534 DiagnosticSummary {
1535 error_count: 1,
1536 warning_count: 0,
1537 }
1538 )]
1539 );
1540 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1541 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1542 });
1543}
1544
// Verifies the event lifecycle of disk-based diagnostics: beginning progress on
// the adapter's `disk_based_diagnostics_progress_token` emits
// `DiskBasedDiagnosticsStarted`, a publish emits `DiagnosticsUpdated`, and
// ending progress emits `DiskBasedDiagnosticsFinished`. Also checks that
// re-publishing identical (empty) diagnostics does not emit a duplicate event.
// NOTE: the event assertions below are order-sensitive.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the disk-based token marks diagnostics as "running".
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for a.rs (not yet open) should still emit an update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening a.rs afterwards should carry the previously-published diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Second identical (empty) publish: no further event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1681
// Restarting a language server while its disk-based diagnostics are still
// in progress must not leave the project stuck in the "running diagnostics"
// state: the replacement server (id 1) drives the started/finished events and
// the old server's never-completed progress is discarded.
// NOTE: the event assertions below are order-sensitive.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server (id 1) should be reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1768
1769#[gpui::test]
1770async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1771 init_test(cx);
1772
1773 let fs = FakeFs::new(cx.executor());
1774 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
1775
1776 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1777
1778 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1779 language_registry.add(rust_lang());
1780 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1781
1782 let (buffer, _) = project
1783 .update(cx, |project, cx| {
1784 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1785 })
1786 .await
1787 .unwrap();
1788
1789 // Publish diagnostics
1790 let fake_server = fake_servers.next().await.unwrap();
1791 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1792 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1793 version: None,
1794 diagnostics: vec![lsp::Diagnostic {
1795 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1796 severity: Some(lsp::DiagnosticSeverity::ERROR),
1797 message: "the message".to_string(),
1798 ..Default::default()
1799 }],
1800 });
1801
1802 cx.executor().run_until_parked();
1803 buffer.update(cx, |buffer, _| {
1804 assert_eq!(
1805 buffer
1806 .snapshot()
1807 .diagnostics_in_range::<_, usize>(0..1, false)
1808 .map(|entry| entry.diagnostic.message.clone())
1809 .collect::<Vec<_>>(),
1810 ["the message".to_string()]
1811 );
1812 });
1813 project.update(cx, |project, cx| {
1814 assert_eq!(
1815 project.diagnostic_summary(false, cx),
1816 DiagnosticSummary {
1817 error_count: 1,
1818 warning_count: 0,
1819 }
1820 );
1821 });
1822
1823 project.update(cx, |project, cx| {
1824 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1825 });
1826
1827 // The diagnostics are cleared.
1828 cx.executor().run_until_parked();
1829 buffer.update(cx, |buffer, _| {
1830 assert_eq!(
1831 buffer
1832 .snapshot()
1833 .diagnostics_in_range::<_, usize>(0..1, false)
1834 .map(|entry| entry.diagnostic.message.clone())
1835 .collect::<Vec<_>>(),
1836 Vec::<String>::new(),
1837 );
1838 });
1839 project.update(cx, |project, cx| {
1840 assert_eq!(
1841 project.diagnostic_summary(false, cx),
1842 DiagnosticSummary {
1843 error_count: 0,
1844 warning_count: 0,
1845 }
1846 );
1847 });
1848}
1849
1850#[gpui::test]
1851async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1852 init_test(cx);
1853
1854 let fs = FakeFs::new(cx.executor());
1855 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1856
1857 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1858 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1859
1860 language_registry.add(rust_lang());
1861 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1862
1863 let (buffer, _handle) = project
1864 .update(cx, |project, cx| {
1865 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1866 })
1867 .await
1868 .unwrap();
1869
1870 // Before restarting the server, report diagnostics with an unknown buffer version.
1871 let fake_server = fake_servers.next().await.unwrap();
1872 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1873 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1874 version: Some(10000),
1875 diagnostics: Vec::new(),
1876 });
1877 cx.executor().run_until_parked();
1878 project.update(cx, |project, cx| {
1879 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1880 });
1881
1882 let mut fake_server = fake_servers.next().await.unwrap();
1883 let notification = fake_server
1884 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1885 .await
1886 .text_document;
1887 assert_eq!(notification.version, 0);
1888}
1889
1890#[gpui::test]
1891async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1892 init_test(cx);
1893
1894 let progress_token = "the-progress-token";
1895
1896 let fs = FakeFs::new(cx.executor());
1897 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1898
1899 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1900
1901 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1902 language_registry.add(rust_lang());
1903 let mut fake_servers = language_registry.register_fake_lsp(
1904 "Rust",
1905 FakeLspAdapter {
1906 name: "the-language-server",
1907 disk_based_diagnostics_sources: vec!["disk".into()],
1908 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1909 ..Default::default()
1910 },
1911 );
1912
1913 let (buffer, _handle) = project
1914 .update(cx, |project, cx| {
1915 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1916 })
1917 .await
1918 .unwrap();
1919
1920 // Simulate diagnostics starting to update.
1921 let mut fake_server = fake_servers.next().await.unwrap();
1922 fake_server
1923 .start_progress_with(
1924 "another-token",
1925 lsp::WorkDoneProgressBegin {
1926 cancellable: Some(false),
1927 ..Default::default()
1928 },
1929 )
1930 .await;
1931 fake_server
1932 .start_progress_with(
1933 progress_token,
1934 lsp::WorkDoneProgressBegin {
1935 cancellable: Some(true),
1936 ..Default::default()
1937 },
1938 )
1939 .await;
1940 cx.executor().run_until_parked();
1941
1942 project.update(cx, |project, cx| {
1943 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1944 });
1945
1946 let cancel_notification = fake_server
1947 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1948 .await;
1949 assert_eq!(
1950 cancel_notification.token,
1951 NumberOrString::String(progress_token.into())
1952 );
1953}
1954
// Toggling `enable_language_server` per language in the user settings should
// stop exactly that language's server (it receives `exit`), and re-enabling it
// should start a fresh server that re-opens the relevant buffers.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    // Each server receives a `didOpen` for its own language's file.
    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A brand-new Rust server instance re-opens the Rust buffer from scratch.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2072
2073#[gpui::test(iterations = 3)]
2074async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
2075 init_test(cx);
2076
2077 let text = "
2078 fn a() { A }
2079 fn b() { BB }
2080 fn c() { CCC }
2081 "
2082 .unindent();
2083
2084 let fs = FakeFs::new(cx.executor());
2085 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2086
2087 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2088 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2089
2090 language_registry.add(rust_lang());
2091 let mut fake_servers = language_registry.register_fake_lsp(
2092 "Rust",
2093 FakeLspAdapter {
2094 disk_based_diagnostics_sources: vec!["disk".into()],
2095 ..Default::default()
2096 },
2097 );
2098
2099 let buffer = project
2100 .update(cx, |project, cx| {
2101 project.open_local_buffer(path!("/dir/a.rs"), cx)
2102 })
2103 .await
2104 .unwrap();
2105
2106 let _handle = project.update(cx, |project, cx| {
2107 project.register_buffer_with_language_servers(&buffer, cx)
2108 });
2109
2110 let mut fake_server = fake_servers.next().await.unwrap();
2111 let open_notification = fake_server
2112 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2113 .await;
2114
2115 // Edit the buffer, moving the content down
2116 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2117 let change_notification_1 = fake_server
2118 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2119 .await;
2120 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2121
2122 // Report some diagnostics for the initial version of the buffer
2123 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2124 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2125 version: Some(open_notification.text_document.version),
2126 diagnostics: vec![
2127 lsp::Diagnostic {
2128 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2129 severity: Some(DiagnosticSeverity::ERROR),
2130 message: "undefined variable 'A'".to_string(),
2131 source: Some("disk".to_string()),
2132 ..Default::default()
2133 },
2134 lsp::Diagnostic {
2135 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2136 severity: Some(DiagnosticSeverity::ERROR),
2137 message: "undefined variable 'BB'".to_string(),
2138 source: Some("disk".to_string()),
2139 ..Default::default()
2140 },
2141 lsp::Diagnostic {
2142 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2143 severity: Some(DiagnosticSeverity::ERROR),
2144 source: Some("disk".to_string()),
2145 message: "undefined variable 'CCC'".to_string(),
2146 ..Default::default()
2147 },
2148 ],
2149 });
2150
2151 // The diagnostics have moved down since they were created.
2152 cx.executor().run_until_parked();
2153 buffer.update(cx, |buffer, _| {
2154 assert_eq!(
2155 buffer
2156 .snapshot()
2157 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2158 .collect::<Vec<_>>(),
2159 &[
2160 DiagnosticEntry {
2161 range: Point::new(3, 9)..Point::new(3, 11),
2162 diagnostic: Diagnostic {
2163 source: Some("disk".into()),
2164 severity: DiagnosticSeverity::ERROR,
2165 message: "undefined variable 'BB'".to_string(),
2166 is_disk_based: true,
2167 group_id: 1,
2168 is_primary: true,
2169 source_kind: DiagnosticSourceKind::Pushed,
2170 ..Diagnostic::default()
2171 },
2172 },
2173 DiagnosticEntry {
2174 range: Point::new(4, 9)..Point::new(4, 12),
2175 diagnostic: Diagnostic {
2176 source: Some("disk".into()),
2177 severity: DiagnosticSeverity::ERROR,
2178 message: "undefined variable 'CCC'".to_string(),
2179 is_disk_based: true,
2180 group_id: 2,
2181 is_primary: true,
2182 source_kind: DiagnosticSourceKind::Pushed,
2183 ..Diagnostic::default()
2184 }
2185 }
2186 ]
2187 );
2188 assert_eq!(
2189 chunks_with_diagnostics(buffer, 0..buffer.len()),
2190 [
2191 ("\n\nfn a() { ".to_string(), None),
2192 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2193 (" }\nfn b() { ".to_string(), None),
2194 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2195 (" }\nfn c() { ".to_string(), None),
2196 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2197 (" }\n".to_string(), None),
2198 ]
2199 );
2200 assert_eq!(
2201 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2202 [
2203 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2204 (" }\nfn c() { ".to_string(), None),
2205 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2206 ]
2207 );
2208 });
2209
2210 // Ensure overlapping diagnostics are highlighted correctly.
2211 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2212 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2213 version: Some(open_notification.text_document.version),
2214 diagnostics: vec![
2215 lsp::Diagnostic {
2216 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2217 severity: Some(DiagnosticSeverity::ERROR),
2218 message: "undefined variable 'A'".to_string(),
2219 source: Some("disk".to_string()),
2220 ..Default::default()
2221 },
2222 lsp::Diagnostic {
2223 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2224 severity: Some(DiagnosticSeverity::WARNING),
2225 message: "unreachable statement".to_string(),
2226 source: Some("disk".to_string()),
2227 ..Default::default()
2228 },
2229 ],
2230 });
2231
2232 cx.executor().run_until_parked();
2233 buffer.update(cx, |buffer, _| {
2234 assert_eq!(
2235 buffer
2236 .snapshot()
2237 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2238 .collect::<Vec<_>>(),
2239 &[
2240 DiagnosticEntry {
2241 range: Point::new(2, 9)..Point::new(2, 12),
2242 diagnostic: Diagnostic {
2243 source: Some("disk".into()),
2244 severity: DiagnosticSeverity::WARNING,
2245 message: "unreachable statement".to_string(),
2246 is_disk_based: true,
2247 group_id: 4,
2248 is_primary: true,
2249 source_kind: DiagnosticSourceKind::Pushed,
2250 ..Diagnostic::default()
2251 }
2252 },
2253 DiagnosticEntry {
2254 range: Point::new(2, 9)..Point::new(2, 10),
2255 diagnostic: Diagnostic {
2256 source: Some("disk".into()),
2257 severity: DiagnosticSeverity::ERROR,
2258 message: "undefined variable 'A'".to_string(),
2259 is_disk_based: true,
2260 group_id: 3,
2261 is_primary: true,
2262 source_kind: DiagnosticSourceKind::Pushed,
2263 ..Diagnostic::default()
2264 },
2265 }
2266 ]
2267 );
2268 assert_eq!(
2269 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2270 [
2271 ("fn a() { ".to_string(), None),
2272 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2273 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2274 ("\n".to_string(), None),
2275 ]
2276 );
2277 assert_eq!(
2278 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2279 [
2280 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2281 ("\n".to_string(), None),
2282 ]
2283 );
2284 });
2285
2286 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2287 // changes since the last save.
2288 buffer.update(cx, |buffer, cx| {
2289 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2290 buffer.edit(
2291 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2292 None,
2293 cx,
2294 );
2295 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2296 });
2297 let change_notification_2 = fake_server
2298 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2299 .await;
2300 assert!(
2301 change_notification_2.text_document.version > change_notification_1.text_document.version
2302 );
2303
2304 // Handle out-of-order diagnostics
2305 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2306 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2307 version: Some(change_notification_2.text_document.version),
2308 diagnostics: vec![
2309 lsp::Diagnostic {
2310 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2311 severity: Some(DiagnosticSeverity::ERROR),
2312 message: "undefined variable 'BB'".to_string(),
2313 source: Some("disk".to_string()),
2314 ..Default::default()
2315 },
2316 lsp::Diagnostic {
2317 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2318 severity: Some(DiagnosticSeverity::WARNING),
2319 message: "undefined variable 'A'".to_string(),
2320 source: Some("disk".to_string()),
2321 ..Default::default()
2322 },
2323 ],
2324 });
2325
2326 cx.executor().run_until_parked();
2327 buffer.update(cx, |buffer, _| {
2328 assert_eq!(
2329 buffer
2330 .snapshot()
2331 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2332 .collect::<Vec<_>>(),
2333 &[
2334 DiagnosticEntry {
2335 range: Point::new(2, 21)..Point::new(2, 22),
2336 diagnostic: Diagnostic {
2337 source: Some("disk".into()),
2338 severity: DiagnosticSeverity::WARNING,
2339 message: "undefined variable 'A'".to_string(),
2340 is_disk_based: true,
2341 group_id: 6,
2342 is_primary: true,
2343 source_kind: DiagnosticSourceKind::Pushed,
2344 ..Diagnostic::default()
2345 }
2346 },
2347 DiagnosticEntry {
2348 range: Point::new(3, 9)..Point::new(3, 14),
2349 diagnostic: Diagnostic {
2350 source: Some("disk".into()),
2351 severity: DiagnosticSeverity::ERROR,
2352 message: "undefined variable 'BB'".to_string(),
2353 is_disk_based: true,
2354 group_id: 5,
2355 is_primary: true,
2356 source_kind: DiagnosticSourceKind::Pushed,
2357 ..Diagnostic::default()
2358 },
2359 }
2360 ]
2361 );
2362 });
2363}
2364
2365#[gpui::test]
2366async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2367 init_test(cx);
2368
2369 let text = concat!(
2370 "let one = ;\n", //
2371 "let two = \n",
2372 "let three = 3;\n",
2373 );
2374
2375 let fs = FakeFs::new(cx.executor());
2376 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2377
2378 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2379 let buffer = project
2380 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2381 .await
2382 .unwrap();
2383
2384 project.update(cx, |project, cx| {
2385 project.lsp_store.update(cx, |lsp_store, cx| {
2386 lsp_store
2387 .update_diagnostic_entries(
2388 LanguageServerId(0),
2389 PathBuf::from("/dir/a.rs"),
2390 None,
2391 None,
2392 vec![
2393 DiagnosticEntry {
2394 range: Unclipped(PointUtf16::new(0, 10))
2395 ..Unclipped(PointUtf16::new(0, 10)),
2396 diagnostic: Diagnostic {
2397 severity: DiagnosticSeverity::ERROR,
2398 message: "syntax error 1".to_string(),
2399 source_kind: DiagnosticSourceKind::Pushed,
2400 ..Diagnostic::default()
2401 },
2402 },
2403 DiagnosticEntry {
2404 range: Unclipped(PointUtf16::new(1, 10))
2405 ..Unclipped(PointUtf16::new(1, 10)),
2406 diagnostic: Diagnostic {
2407 severity: DiagnosticSeverity::ERROR,
2408 message: "syntax error 2".to_string(),
2409 source_kind: DiagnosticSourceKind::Pushed,
2410 ..Diagnostic::default()
2411 },
2412 },
2413 ],
2414 cx,
2415 )
2416 .unwrap();
2417 })
2418 });
2419
2420 // An empty range is extended forward to include the following character.
2421 // At the end of a line, an empty range is extended backward to include
2422 // the preceding character.
2423 buffer.update(cx, |buffer, _| {
2424 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2425 assert_eq!(
2426 chunks
2427 .iter()
2428 .map(|(s, d)| (s.as_str(), *d))
2429 .collect::<Vec<_>>(),
2430 &[
2431 ("let one = ", None),
2432 (";", Some(DiagnosticSeverity::ERROR)),
2433 ("\nlet two =", None),
2434 (" ", Some(DiagnosticSeverity::ERROR)),
2435 ("\nlet three = 3;\n", None)
2436 ]
2437 );
2438 });
2439}
2440
2441#[gpui::test]
2442async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2443 init_test(cx);
2444
2445 let fs = FakeFs::new(cx.executor());
2446 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2447 .await;
2448
2449 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2450 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2451
2452 lsp_store.update(cx, |lsp_store, cx| {
2453 lsp_store
2454 .update_diagnostic_entries(
2455 LanguageServerId(0),
2456 Path::new("/dir/a.rs").to_owned(),
2457 None,
2458 None,
2459 vec![DiagnosticEntry {
2460 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2461 diagnostic: Diagnostic {
2462 severity: DiagnosticSeverity::ERROR,
2463 is_primary: true,
2464 message: "syntax error a1".to_string(),
2465 source_kind: DiagnosticSourceKind::Pushed,
2466 ..Diagnostic::default()
2467 },
2468 }],
2469 cx,
2470 )
2471 .unwrap();
2472 lsp_store
2473 .update_diagnostic_entries(
2474 LanguageServerId(1),
2475 Path::new("/dir/a.rs").to_owned(),
2476 None,
2477 None,
2478 vec![DiagnosticEntry {
2479 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2480 diagnostic: Diagnostic {
2481 severity: DiagnosticSeverity::ERROR,
2482 is_primary: true,
2483 message: "syntax error b1".to_string(),
2484 source_kind: DiagnosticSourceKind::Pushed,
2485 ..Diagnostic::default()
2486 },
2487 }],
2488 cx,
2489 )
2490 .unwrap();
2491
2492 assert_eq!(
2493 lsp_store.diagnostic_summary(false, cx),
2494 DiagnosticSummary {
2495 error_count: 2,
2496 warning_count: 0,
2497 }
2498 );
2499 });
2500}
2501
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP edits tagged with a stale document version are rebased
    // onto the current buffer: the buffer is edited *after* the server's
    // snapshot, then `edits_from_lsp` is invoked with the old version number,
    // and the final text must contain both the user's and the server's changes.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw on open; the edits below are
    // tagged with this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The edit positions below are expressed in the *old* document's
    // coordinates; passing `Some(lsp_document_version)` asks the store to
    // translate them through the intervening buffer edits.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits must land the server's changes in the right
    // places while preserving the user's intervening comment insertions.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2656
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // Verifies that a sprawling LSP edit set (replace + reinsert + delete,
    // touching most of the file) is minimized by `edits_from_lsp` into a small
    // set of precise buffer edits before being applied.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four overlapping LSP edits above collapse into just two minimal
        // buffer edits: the import replacement and a two-line deletion.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2767
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    // Verifies tolerance of a server that sends an insertion *after* a
    // replacement at the same start position (the LSP spec requires insertions
    // to come first); both edits must still be applied, with the inserted
    // import ending up before the replaced text.
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
2823
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    // Verifies that `edits_from_lsp` tolerates malformed server edits:
    // unordered edits, an inverted range (end before start), and a range whose
    // end lies past the end of the file. The result must still be the same two
    // minimal, well-formed buffer edits asserted below.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) precedes start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Out-of-bounds range: line 99 does not exist in the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the computed edits are ordered and
        // clipped to the buffer's actual contents.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2930
2931fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2932 buffer: &Buffer,
2933 range: Range<T>,
2934) -> Vec<(String, Option<DiagnosticSeverity>)> {
2935 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2936 for chunk in buffer.snapshot().chunks(range, true) {
2937 if chunks.last().map_or(false, |prev_chunk| {
2938 prev_chunk.1 == chunk.diagnostic_severity
2939 }) {
2940 chunks.last_mut().unwrap().0.push_str(chunk.text);
2941 } else {
2942 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2943 }
2944 }
2945 chunks
2946}
2947
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Verifies go-to-definition across files: the target file (a.rs) is not
    // part of the opened project (only b.rs is), so the definition must open
    // it in an invisible worktree that is released once the definition result
    // is dropped.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is opened as the project root; a.rs stays outside the project.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server responds to GotoDefinition with a location inside a.rs,
    // after asserting it was queried at the expected document and position.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, a.rs is held in an extra worktree
        // that is not visible to the user.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: (absolute path, is_visible) for every worktree in the project.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3045
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    // Verifies that a completion item's explicit `text_edit` wins over both
    // `insert_text` and `label` when resolving the inserted text and the
    // range it replaces.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request first; the fake server's handler is
    // installed below, before the request is actually serviced.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    // The text_edit covers the trailing "fqn" (last 3 chars).
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion uses the text_edit's new_text and range, not
    // insert_text ("insertText") or label ("labelText").
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3128
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies the fallback chain when completion items omit `text_edit` but
    // the response supplies a default `edit_range` in `item_defaults`:
    // insert_text is used if present, otherwise the label.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    // The list-level default edit_range covers the trailing
                    // "fqn" (last 3 chars); items below carry no range.
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text is preferred over the label; the default edit_range
        // supplies the replaced span.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With neither text_edit nor insert_text, the label is inserted.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3264
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies completion resolution when the server supplies neither a
    // per-item `text_edit` nor a default `edit_range`: the replaced span must
    // be inferred from the word around the cursor.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // insert_text is used, and the replaced range is the "fqn" word
    // (3 chars) preceding the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote, after "cmp".
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is inserted, replacing the "cmp" word before the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3370
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    // Verifies that carriage returns in a completion's insert_text ("\r" and
    // "\r\n") are normalized to plain "\n" in the resolved new_text.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // insert_text mixes bare "\r" and Windows-style "\r\n".
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    // Both line-ending forms are normalized to "\n".
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3438
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // End-to-end flow for a code action that carries a command instead of
    // edits: resolving the action yields a command, executing that command
    // triggers a `workspace/applyEdit` request from the server, and the edits
    // it applies are captured in the project transaction returned to the
    // caller (so they can be undone as a unit).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`, which makes it
    // resolvable into a command below).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3580
3581#[gpui::test(iterations = 10)]
3582async fn test_save_file(cx: &mut gpui::TestAppContext) {
3583 init_test(cx);
3584
3585 let fs = FakeFs::new(cx.executor());
3586 fs.insert_tree(
3587 path!("/dir"),
3588 json!({
3589 "file1": "the old contents",
3590 }),
3591 )
3592 .await;
3593
3594 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3595 let buffer = project
3596 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3597 .await
3598 .unwrap();
3599 buffer.update(cx, |buffer, cx| {
3600 assert_eq!(buffer.text(), "the old contents");
3601 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3602 });
3603
3604 project
3605 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3606 .await
3607 .unwrap();
3608
3609 let new_text = fs
3610 .load(Path::new(path!("/dir/file1")))
3611 .await
3612 .unwrap()
3613 .replace("\r\n", "\n");
3614 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3615}
3616
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    // Saving an untitled buffer under a name with a known extension should
    // start the matching language server and register the buffer with it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // A freshly created buffer has no file, so no language server applies yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the untitled buffer as a `.rs` file inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: Arc::from("file.rs".as_ref()),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer is associated with the new language server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
3696
3697#[gpui::test(iterations = 30)]
3698async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3699 init_test(cx);
3700
3701 let fs = FakeFs::new(cx.executor().clone());
3702 fs.insert_tree(
3703 path!("/dir"),
3704 json!({
3705 "file1": "the original contents",
3706 }),
3707 )
3708 .await;
3709
3710 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3711 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3712 let buffer = project
3713 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3714 .await
3715 .unwrap();
3716
3717 // Simulate buffer diffs being slow, so that they don't complete before
3718 // the next file change occurs.
3719 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3720
3721 // Change the buffer's file on disk, and then wait for the file change
3722 // to be detected by the worktree, so that the buffer starts reloading.
3723 fs.save(
3724 path!("/dir/file1").as_ref(),
3725 &"the first contents".into(),
3726 Default::default(),
3727 )
3728 .await
3729 .unwrap();
3730 worktree.next_event(cx).await;
3731
3732 // Change the buffer's file again. Depending on the random seed, the
3733 // previous file change may still be in progress.
3734 fs.save(
3735 path!("/dir/file1").as_ref(),
3736 &"the second contents".into(),
3737 Default::default(),
3738 )
3739 .await
3740 .unwrap();
3741 worktree.next_event(cx).await;
3742
3743 cx.executor().run_until_parked();
3744 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3745 buffer.read_with(cx, |buffer, _| {
3746 assert_eq!(buffer.text(), on_disk_text);
3747 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3748 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3749 });
3750}
3751
3752#[gpui::test(iterations = 30)]
3753async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3754 init_test(cx);
3755
3756 let fs = FakeFs::new(cx.executor().clone());
3757 fs.insert_tree(
3758 path!("/dir"),
3759 json!({
3760 "file1": "the original contents",
3761 }),
3762 )
3763 .await;
3764
3765 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3766 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3767 let buffer = project
3768 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3769 .await
3770 .unwrap();
3771
3772 // Simulate buffer diffs being slow, so that they don't complete before
3773 // the next file change occurs.
3774 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3775
3776 // Change the buffer's file on disk, and then wait for the file change
3777 // to be detected by the worktree, so that the buffer starts reloading.
3778 fs.save(
3779 path!("/dir/file1").as_ref(),
3780 &"the first contents".into(),
3781 Default::default(),
3782 )
3783 .await
3784 .unwrap();
3785 worktree.next_event(cx).await;
3786
3787 cx.executor()
3788 .spawn(cx.executor().simulate_random_delay())
3789 .await;
3790
3791 // Perform a noop edit, causing the buffer's version to increase.
3792 buffer.update(cx, |buffer, cx| {
3793 buffer.edit([(0..0, " ")], None, cx);
3794 buffer.undo(cx);
3795 });
3796
3797 cx.executor().run_until_parked();
3798 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3799 buffer.read_with(cx, |buffer, _| {
3800 let buffer_text = buffer.text();
3801 if buffer_text == on_disk_text {
3802 assert!(
3803 !buffer.is_dirty() && !buffer.has_conflict(),
3804 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3805 );
3806 }
3807 // If the file change occurred while the buffer was processing the first
3808 // change, the buffer will be in a conflicting state.
3809 else {
3810 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3811 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3812 }
3813 });
3814}
3815
3816#[gpui::test]
3817async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3818 init_test(cx);
3819
3820 let fs = FakeFs::new(cx.executor());
3821 fs.insert_tree(
3822 path!("/dir"),
3823 json!({
3824 "file1": "the old contents",
3825 }),
3826 )
3827 .await;
3828
3829 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3830 let buffer = project
3831 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3832 .await
3833 .unwrap();
3834 buffer.update(cx, |buffer, cx| {
3835 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3836 });
3837
3838 project
3839 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3840 .await
3841 .unwrap();
3842
3843 let new_text = fs
3844 .load(Path::new(path!("/dir/file1")))
3845 .await
3846 .unwrap()
3847 .replace("\r\n", "\n");
3848 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3849}
3850
3851#[gpui::test]
3852async fn test_save_as(cx: &mut gpui::TestAppContext) {
3853 init_test(cx);
3854
3855 let fs = FakeFs::new(cx.executor());
3856 fs.insert_tree("/dir", json!({})).await;
3857
3858 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3859
3860 let languages = project.update(cx, |project, _| project.languages().clone());
3861 languages.add(rust_lang());
3862
3863 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3864 buffer.update(cx, |buffer, cx| {
3865 buffer.edit([(0..0, "abc")], None, cx);
3866 assert!(buffer.is_dirty());
3867 assert!(!buffer.has_conflict());
3868 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3869 });
3870 project
3871 .update(cx, |project, cx| {
3872 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3873 let path = ProjectPath {
3874 worktree_id,
3875 path: Arc::from(Path::new("file1.rs")),
3876 };
3877 project.save_buffer_as(buffer.clone(), path, cx)
3878 })
3879 .await
3880 .unwrap();
3881 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3882
3883 cx.executor().run_until_parked();
3884 buffer.update(cx, |buffer, cx| {
3885 assert_eq!(
3886 buffer.file().unwrap().full_path(cx),
3887 Path::new("dir/file1.rs")
3888 );
3889 assert!(!buffer.is_dirty());
3890 assert!(!buffer.has_conflict());
3891 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3892 });
3893
3894 let opened_buffer = project
3895 .update(cx, |project, cx| {
3896 project.open_local_buffer("/dir/file1.rs", cx)
3897 })
3898 .await
3899 .unwrap();
3900 assert_eq!(opened_buffer, buffer);
3901}
3902
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Uses a real filesystem: renames and deletions on disk should preserve
    // worktree entry ids and update open buffers' paths, and the worktree's
    // broadcast update stream should bring a remote replica into the same
    // state when replayed.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Resolves a worktree-relative path to its stable entry id.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree broadcasts, so they can be
    // replayed on the remote replica at the end of the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including renames of ancestor directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files' new paths; the deleted file's buffer
    // keeps its old path but reports a Deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
4068
4069#[gpui::test(iterations = 10)]
4070async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4071 init_test(cx);
4072
4073 let fs = FakeFs::new(cx.executor());
4074 fs.insert_tree(
4075 path!("/dir"),
4076 json!({
4077 "a": {
4078 "file1": "",
4079 }
4080 }),
4081 )
4082 .await;
4083
4084 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4085 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4086 let tree_id = tree.update(cx, |tree, _| tree.id());
4087
4088 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4089 project.update(cx, |project, cx| {
4090 let tree = project.worktrees(cx).next().unwrap();
4091 tree.read(cx)
4092 .entry_for_path(path)
4093 .unwrap_or_else(|| panic!("no entry for path {}", path))
4094 .id
4095 })
4096 };
4097
4098 let dir_id = id_for_path("a", cx);
4099 let file_id = id_for_path("a/file1", cx);
4100 let buffer = project
4101 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
4102 .await
4103 .unwrap();
4104 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4105
4106 project
4107 .update(cx, |project, cx| {
4108 project.rename_entry(dir_id, Path::new("b"), cx)
4109 })
4110 .unwrap()
4111 .await
4112 .to_included()
4113 .unwrap();
4114 cx.executor().run_until_parked();
4115
4116 assert_eq!(id_for_path("b", cx), dir_id);
4117 assert_eq!(id_for_path("b/file1", cx), file_id);
4118 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4119}
4120
4121#[gpui::test]
4122async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4123 init_test(cx);
4124
4125 let fs = FakeFs::new(cx.executor());
4126 fs.insert_tree(
4127 "/dir",
4128 json!({
4129 "a.txt": "a-contents",
4130 "b.txt": "b-contents",
4131 }),
4132 )
4133 .await;
4134
4135 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4136
4137 // Spawn multiple tasks to open paths, repeating some paths.
4138 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4139 (
4140 p.open_local_buffer("/dir/a.txt", cx),
4141 p.open_local_buffer("/dir/b.txt", cx),
4142 p.open_local_buffer("/dir/a.txt", cx),
4143 )
4144 });
4145
4146 let buffer_a_1 = buffer_a_1.await.unwrap();
4147 let buffer_a_2 = buffer_a_2.await.unwrap();
4148 let buffer_b = buffer_b.await.unwrap();
4149 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4150 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4151
4152 // There is only one buffer per path.
4153 let buffer_a_id = buffer_a_1.entity_id();
4154 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4155
4156 // Open the same path again while it is still open.
4157 drop(buffer_a_1);
4158 let buffer_a_3 = project
4159 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4160 .await
4161 .unwrap();
4162
4163 // There's still only one buffer per path.
4164 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4165}
4166
4167#[gpui::test]
4168async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4169 init_test(cx);
4170
4171 let fs = FakeFs::new(cx.executor());
4172 fs.insert_tree(
4173 path!("/dir"),
4174 json!({
4175 "file1": "abc",
4176 "file2": "def",
4177 "file3": "ghi",
4178 }),
4179 )
4180 .await;
4181
4182 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4183
4184 let buffer1 = project
4185 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4186 .await
4187 .unwrap();
4188 let events = Arc::new(Mutex::new(Vec::new()));
4189
4190 // initially, the buffer isn't dirty.
4191 buffer1.update(cx, |buffer, cx| {
4192 cx.subscribe(&buffer1, {
4193 let events = events.clone();
4194 move |_, _, event, _| match event {
4195 BufferEvent::Operation { .. } => {}
4196 _ => events.lock().push(event.clone()),
4197 }
4198 })
4199 .detach();
4200
4201 assert!(!buffer.is_dirty());
4202 assert!(events.lock().is_empty());
4203
4204 buffer.edit([(1..2, "")], None, cx);
4205 });
4206
4207 // after the first edit, the buffer is dirty, and emits a dirtied event.
4208 buffer1.update(cx, |buffer, cx| {
4209 assert!(buffer.text() == "ac");
4210 assert!(buffer.is_dirty());
4211 assert_eq!(
4212 *events.lock(),
4213 &[
4214 language::BufferEvent::Edited,
4215 language::BufferEvent::DirtyChanged
4216 ]
4217 );
4218 events.lock().clear();
4219 buffer.did_save(
4220 buffer.version(),
4221 buffer.file().unwrap().disk_state().mtime(),
4222 cx,
4223 );
4224 });
4225
4226 // after saving, the buffer is not dirty, and emits a saved event.
4227 buffer1.update(cx, |buffer, cx| {
4228 assert!(!buffer.is_dirty());
4229 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4230 events.lock().clear();
4231
4232 buffer.edit([(1..1, "B")], None, cx);
4233 buffer.edit([(2..2, "D")], None, cx);
4234 });
4235
4236 // after editing again, the buffer is dirty, and emits another dirty event.
4237 buffer1.update(cx, |buffer, cx| {
4238 assert!(buffer.text() == "aBDc");
4239 assert!(buffer.is_dirty());
4240 assert_eq!(
4241 *events.lock(),
4242 &[
4243 language::BufferEvent::Edited,
4244 language::BufferEvent::DirtyChanged,
4245 language::BufferEvent::Edited,
4246 ],
4247 );
4248 events.lock().clear();
4249
4250 // After restoring the buffer to its previously-saved state,
4251 // the buffer is not considered dirty anymore.
4252 buffer.edit([(1..3, "")], None, cx);
4253 assert!(buffer.text() == "ac");
4254 assert!(!buffer.is_dirty());
4255 });
4256
4257 assert_eq!(
4258 *events.lock(),
4259 &[
4260 language::BufferEvent::Edited,
4261 language::BufferEvent::DirtyChanged
4262 ]
4263 );
4264
4265 // When a file is deleted, it is not considered dirty.
4266 let events = Arc::new(Mutex::new(Vec::new()));
4267 let buffer2 = project
4268 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4269 .await
4270 .unwrap();
4271 buffer2.update(cx, |_, cx| {
4272 cx.subscribe(&buffer2, {
4273 let events = events.clone();
4274 move |_, _, event, _| match event {
4275 BufferEvent::Operation { .. } => {}
4276 _ => events.lock().push(event.clone()),
4277 }
4278 })
4279 .detach();
4280 });
4281
4282 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4283 .await
4284 .unwrap();
4285 cx.executor().run_until_parked();
4286 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4287 assert_eq!(
4288 mem::take(&mut *events.lock()),
4289 &[language::BufferEvent::FileHandleChanged]
4290 );
4291
4292 // Buffer becomes dirty when edited.
4293 buffer2.update(cx, |buffer, cx| {
4294 buffer.edit([(2..3, "")], None, cx);
4295 assert_eq!(buffer.is_dirty(), true);
4296 });
4297 assert_eq!(
4298 mem::take(&mut *events.lock()),
4299 &[
4300 language::BufferEvent::Edited,
4301 language::BufferEvent::DirtyChanged
4302 ]
4303 );
4304
4305 // Buffer becomes clean again when all of its content is removed, because
4306 // the file was deleted.
4307 buffer2.update(cx, |buffer, cx| {
4308 buffer.edit([(0..2, "")], None, cx);
4309 assert_eq!(buffer.is_empty(), true);
4310 assert_eq!(buffer.is_dirty(), false);
4311 });
4312 assert_eq!(
4313 *events.lock(),
4314 &[
4315 language::BufferEvent::Edited,
4316 language::BufferEvent::DirtyChanged
4317 ]
4318 );
4319
4320 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4321 let events = Arc::new(Mutex::new(Vec::new()));
4322 let buffer3 = project
4323 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4324 .await
4325 .unwrap();
4326 buffer3.update(cx, |_, cx| {
4327 cx.subscribe(&buffer3, {
4328 let events = events.clone();
4329 move |_, _, event, _| match event {
4330 BufferEvent::Operation { .. } => {}
4331 _ => events.lock().push(event.clone()),
4332 }
4333 })
4334 .detach();
4335 });
4336
4337 buffer3.update(cx, |buffer, cx| {
4338 buffer.edit([(0..0, "x")], None, cx);
4339 });
4340 events.lock().clear();
4341 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4342 .await
4343 .unwrap();
4344 cx.executor().run_until_parked();
4345 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4346 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4347}
4348
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // A clean buffer reloads when its file changes on disk, remapping anchors
    // through the text diff; a dirty buffer keeps its edits and is flagged as
    // conflicted instead of being overwritten.
    init_test(cx);

    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Place anchors at each of the marked offsets so we can verify that the
    // reload remaps them to the corresponding positions in the new text.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4431
4432#[gpui::test]
4433async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4434 init_test(cx);
4435
4436 let fs = FakeFs::new(cx.executor());
4437 fs.insert_tree(
4438 path!("/dir"),
4439 json!({
4440 "file1": "a\nb\nc\n",
4441 "file2": "one\r\ntwo\r\nthree\r\n",
4442 }),
4443 )
4444 .await;
4445
4446 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4447 let buffer1 = project
4448 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4449 .await
4450 .unwrap();
4451 let buffer2 = project
4452 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4453 .await
4454 .unwrap();
4455
4456 buffer1.update(cx, |buffer, _| {
4457 assert_eq!(buffer.text(), "a\nb\nc\n");
4458 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4459 });
4460 buffer2.update(cx, |buffer, _| {
4461 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4462 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4463 });
4464
4465 // Change a file's line endings on disk from unix to windows. The buffer's
4466 // state updates correctly.
4467 fs.save(
4468 path!("/dir/file1").as_ref(),
4469 &"aaa\nb\nc\n".into(),
4470 LineEnding::Windows,
4471 )
4472 .await
4473 .unwrap();
4474 cx.executor().run_until_parked();
4475 buffer1.update(cx, |buffer, _| {
4476 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4477 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4478 });
4479
4480 // Save a file with windows line endings. The file is written correctly.
4481 buffer2.update(cx, |buffer, cx| {
4482 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4483 });
4484 project
4485 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4486 .await
4487 .unwrap();
4488 assert_eq!(
4489 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4490 "one\r\ntwo\r\nthree\r\nfour\r\n",
4491 );
4492}
4493
// Verifies that diagnostics published with `related_information` are grouped:
// a primary diagnostic and the hint-severity entries that mirror its related
// locations share a `group_id`, and `diagnostic_group()` returns the whole
// group ordered by position.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload with two primary diagnostics — a
    // warning ("error 1") and an error ("error 2") — plus separate
    // hint-severity diagnostics that mirror each primary's
    // related_information entries and point back at the "original diagnostic".
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by position: "error 2" and its two hints form
    // group 0, "error 1" and its hint form group 1. Exactly one entry per
    // group has is_primary = true.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" error plus its two hints, in positional order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" warning plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
4753
// Verifies that renaming a worktree entry sends the LSP file-operation
// notifications: a `workspace/willRenameFiles` request before the rename
// (whose returned WorkspaceEdit is applied) and a `workspace/didRenameFiles`
// notification afterwards, for servers that registered matching file filters.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server registers for rename notifications on *.rs files and
    // on all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the willRenameFiles request handler is installed
    // below, before this future is awaited.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server returns from willRenameFiles; the project is
    // expected to apply it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit handed back by the willRenameFiles handler so we can
    // assert it was the one the server produced.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives didRenameFiles with the
    // same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4882
// Verifies symbol rename via LSP: `prepare_rename` resolves the renameable
// range via textDocument/prepareRename, and `perform_rename` applies the
// multi-file WorkspaceEdit returned by textDocument/rename.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Advertise rename support with prepareProvider so prepare_rename issues
    // a prepareRename request instead of falling back to a local word range.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server reports the
    // renameable range as columns 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server's WorkspaceEdit touches both files:
    // the definition in one.rs and both references in two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its undo transaction; both
    // buffers should now contain the renamed symbol.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5022
// Verifies project-wide text search over both on-disk files and open
// buffers, including unsaved in-memory edits.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive search for "TWO" matches the definition in two.rs and
    // the reference in three.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so that it now references
    // two::TWO twice.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The search now also reports the unsaved matches in four.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40]),
            (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5099
// Verifies the inclusion path filter of project search: only files matching
// at least one inclusion glob are searched; non-matching globs are inert.
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.ts").to_string(), vec![14..18]),
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5219
// Verifies the exclusion path filter of project search: files matching any
// exclusion glob are skipped; non-matching globs have no effect.
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5339
5340#[gpui::test]
5341async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5342 init_test(cx);
5343
5344 let search_query = "file";
5345
5346 let fs = FakeFs::new(cx.executor());
5347 fs.insert_tree(
5348 path!("/dir"),
5349 json!({
5350 "one.rs": r#"// Rust file one"#,
5351 "one.ts": r#"// TypeScript file one"#,
5352 "two.rs": r#"// Rust file two"#,
5353 "two.ts": r#"// TypeScript file two"#,
5354 }),
5355 )
5356 .await;
5357 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5358
5359 assert!(
5360 search(
5361 &project,
5362 SearchQuery::text(
5363 search_query,
5364 false,
5365 true,
5366 false,
5367 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5368 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5369 false,
5370 None,
5371 )
5372 .unwrap(),
5373 cx
5374 )
5375 .await
5376 .unwrap()
5377 .is_empty(),
5378 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5379 );
5380
5381 assert!(
5382 search(
5383 &project,
5384 SearchQuery::text(
5385 search_query,
5386 false,
5387 true,
5388 false,
5389 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5390 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5391 false,
5392 None,
5393 )
5394 .unwrap(),
5395 cx
5396 )
5397 .await
5398 .unwrap()
5399 .is_empty(),
5400 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5401 );
5402
5403 assert!(
5404 search(
5405 &project,
5406 SearchQuery::text(
5407 search_query,
5408 false,
5409 true,
5410 false,
5411 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5412 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5413 false,
5414 None,
5415 )
5416 .unwrap(),
5417 cx
5418 )
5419 .await
5420 .unwrap()
5421 .is_empty(),
5422 "Non-matching inclusions and exclusions should not change that."
5423 );
5424
5425 assert_eq!(
5426 search(
5427 &project,
5428 SearchQuery::text(
5429 search_query,
5430 false,
5431 true,
5432 false,
5433 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5434 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5435 false,
5436 None,
5437 )
5438 .unwrap(),
5439 cx
5440 )
5441 .await
5442 .unwrap(),
5443 HashMap::from_iter([
5444 (separator!("dir/one.ts").to_string(), vec![14..18]),
5445 (separator!("dir/two.ts").to_string(), vec![14..18]),
5446 ]),
5447 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5448 );
5449}
5450
// Verifies inclusion filters across multiple worktrees: worktree-prefixed
// globs restrict results to one worktree, while bare globs apply to all.
// (The fourth-from-last SearchQuery::text argument toggles whether globs
// are matched against worktree-relative full paths — see the `true` vs
// `false` usages below.)
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
5548
// Verifies searching in gitignored directories: ignored files are skipped
// by default, included when the "include ignored" flag (fourth
// SearchQuery::text argument) is set, and still subject to the
// inclusion/exclusion path filters.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project is used for each query so earlier searches don't
    // affect which entries have been loaded.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5671
// Verifies search over non-ASCII (Cyrillic) text. A case-sensitive query
// stays a plain text search, while a case-insensitive unicode query is
// compiled to a regex (see the assert_matches below); match offsets are
// byte ranges, so each two-byte Cyrillic letter advances the range by 2.
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![17..29]),
            (separator!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive matching of unicode text requires a regex query; it
    // now also finds the uppercase ПРИВЕТ occurrences.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (separator!("dir/two.rs").to_string(), vec![3..15]),
            (separator!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing "." is matched literally (only two.rs ends the word with
    // a period), not as a regex wildcard.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
5755
// Verifies entry creation within a worktree: names containing dots (like
// "b..") are allowed, but any path that escapes the worktree root or contains
// a ".." component is rejected, both for creating entries and opening buffers.
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The worktree root is /one/two/three — /one/two/c.rs is outside it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a legal file name (dots are only special as a path component).
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            // "four/../beep" would resolve inside the worktree, but the
            // ".." component itself is rejected.
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // The filesystem contains only the pre-existing entries plus "b.." —
    // neither rejected path was created.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
5825
// Verifies hover behavior when several language servers are attached to one
// buffer: servers advertising hover capability are all queried (even one that
// returns None), a server without hover capability is never queried, and the
// final result aggregates only the non-empty responses.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Register four fake servers for "tsx": the first three advertise hover
    // support, the last one does not.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all registered servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wire up per-server hover handlers, keyed by server name so each is
    // expected to initialize exactly once.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two respond with a hover string labeled by server name.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // This one is queried but returns no hover.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // No hover capability: its handler must never fire.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Wait until every capable server has actually received its hover request.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // ESLintServer returned None, so only the two real responses remain
    // (sorted to make the comparison order-independent).
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5979
// Verifies that hover responses consisting only of empty/whitespace parts
// are filtered out entirely, producing no hover blocks.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server answers with three parts that are all effectively empty:
    // an empty string, spaces only, and newlines only.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String("   ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Ensure the request actually reached the server before asserting.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
6052
// Verifies that requesting code actions with an explicit kind filter returns
// only the actions of that kind, even when the server offers more.
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds; the request below
    // filters for just one of them.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only SOURCE_ORGANIZE_IMPORTS actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Ensure the request actually reached the server before asserting.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // The SOURCE_FIX_ALL action must have been filtered out.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6131
6132#[gpui::test]
6133async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6134 init_test(cx);
6135
6136 let fs = FakeFs::new(cx.executor());
6137 fs.insert_tree(
6138 path!("/dir"),
6139 json!({
6140 "a.tsx": "a",
6141 }),
6142 )
6143 .await;
6144
6145 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6146
6147 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6148 language_registry.add(tsx_lang());
6149 let language_server_names = [
6150 "TypeScriptServer",
6151 "TailwindServer",
6152 "ESLintServer",
6153 "NoActionsCapabilitiesServer",
6154 ];
6155
6156 let mut language_server_rxs = [
6157 language_registry.register_fake_lsp(
6158 "tsx",
6159 FakeLspAdapter {
6160 name: language_server_names[0],
6161 capabilities: lsp::ServerCapabilities {
6162 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6163 ..lsp::ServerCapabilities::default()
6164 },
6165 ..FakeLspAdapter::default()
6166 },
6167 ),
6168 language_registry.register_fake_lsp(
6169 "tsx",
6170 FakeLspAdapter {
6171 name: language_server_names[1],
6172 capabilities: lsp::ServerCapabilities {
6173 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6174 ..lsp::ServerCapabilities::default()
6175 },
6176 ..FakeLspAdapter::default()
6177 },
6178 ),
6179 language_registry.register_fake_lsp(
6180 "tsx",
6181 FakeLspAdapter {
6182 name: language_server_names[2],
6183 capabilities: lsp::ServerCapabilities {
6184 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6185 ..lsp::ServerCapabilities::default()
6186 },
6187 ..FakeLspAdapter::default()
6188 },
6189 ),
6190 language_registry.register_fake_lsp(
6191 "tsx",
6192 FakeLspAdapter {
6193 name: language_server_names[3],
6194 capabilities: lsp::ServerCapabilities {
6195 code_action_provider: None,
6196 ..lsp::ServerCapabilities::default()
6197 },
6198 ..FakeLspAdapter::default()
6199 },
6200 ),
6201 ];
6202
6203 let (buffer, _handle) = project
6204 .update(cx, |p, cx| {
6205 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6206 })
6207 .await
6208 .unwrap();
6209 cx.executor().run_until_parked();
6210
6211 let mut servers_with_actions_requests = HashMap::default();
6212 for i in 0..language_server_names.len() {
6213 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6214 panic!(
6215 "Failed to get language server #{i} with name {}",
6216 &language_server_names[i]
6217 )
6218 });
6219 let new_server_name = new_server.server.name();
6220
6221 assert!(
6222 !servers_with_actions_requests.contains_key(&new_server_name),
6223 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6224 );
6225 match new_server_name.0.as_ref() {
6226 "TailwindServer" | "TypeScriptServer" => {
6227 servers_with_actions_requests.insert(
6228 new_server_name.clone(),
6229 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6230 move |_, _| {
6231 let name = new_server_name.clone();
6232 async move {
6233 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6234 lsp::CodeAction {
6235 title: format!("{name} code action"),
6236 ..lsp::CodeAction::default()
6237 },
6238 )]))
6239 }
6240 },
6241 ),
6242 );
6243 }
6244 "ESLintServer" => {
6245 servers_with_actions_requests.insert(
6246 new_server_name,
6247 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6248 |_, _| async move { Ok(None) },
6249 ),
6250 );
6251 }
6252 "NoActionsCapabilitiesServer" => {
6253 let _never_handled = new_server
6254 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6255 panic!(
6256 "Should not call for code actions server with no corresponding capabilities"
6257 )
6258 });
6259 }
6260 unexpected => panic!("Unexpected server name: {unexpected}"),
6261 }
6262 }
6263
6264 let code_actions_task = project.update(cx, |project, cx| {
6265 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6266 });
6267
6268 // cx.run_until_parked();
6269 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6270 |mut code_actions_request| async move {
6271 code_actions_request
6272 .next()
6273 .await
6274 .expect("All code actions requests should have been triggered")
6275 },
6276 ))
6277 .await;
6278 assert_eq!(
6279 vec!["TailwindServer code action", "TypeScriptServer code action"],
6280 code_actions_task
6281 .await
6282 .unwrap()
6283 .into_iter()
6284 .map(|code_action| code_action.lsp_action.title().to_owned())
6285 .sorted()
6286 .collect::<Vec<_>>(),
6287 "Should receive code actions responses from all related servers with hover capabilities"
6288 );
6289}
6290
// Verifies `Project::move_worktree` across every adjacency case: moving an
// earlier worktree after a later one and vice versa, for first/second,
// second/third, and first/third pairs. After each move the full visible
// order is re-asserted.
#[gpui::test]
async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;",
            "c.rs": "let c = 2;",
        }),
    )
    .await;

    // Three single-file worktrees, initially ordered [a, b, c].
    let project = Project::test(
        fs,
        [
            "/dir/a.rs".as_ref(),
            "/dir/b.rs".as_ref(),
            "/dir/c.rs".as_ref(),
        ],
        cx,
    )
    .await;

    // check the initial state and get the worktrees
    // NOTE: worktree_a/b/c are named for the file they contain, not their
    // position — positions change below, the handles don't.
    let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let worktree_a = worktrees[0].read(cx);
        let worktree_b = worktrees[1].read(cx);
        let worktree_c = worktrees[2].read(cx);

        // check they start in the right order
        assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");

        (
            worktrees[0].clone(),
            worktrees[1].clone(),
            worktrees[2].clone(),
        )
    });

    // move first worktree to after the second
    // [a, b, c] -> [b, a, c]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving first after second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to before the first
    // [b, a, c] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            // Here "second"/"first" refer to current positions: a is now
            // second, b is now first.
            let second = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving second before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to after the third
    // [a, b, c] -> [a, c, b]
    project
        .update(cx, |project, cx| {
            let second = worktree_b.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(second.id(), third.id(), cx)
        })
        .expect("moving second after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
    });

    // move the third worktree to before the second
    // [a, c, b] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            let third = worktree_c.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(third.id(), second.id(), cx)
        })
        .expect("moving third before second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the first worktree to after the third
    // [a, b, c] -> [b, c, a]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(first.id(), third.id(), cx)
        })
        .expect("moving first after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
    });

    // move the third worktree to before the first
    // [b, c, a] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            // Positions again: a is now third, b is now first.
            let third = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(third.id(), first.id(), cx)
        })
        .expect("moving third before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });
}
6488
// Verifies the unstaged diff (buffer vs. git index): the initial hunks
// reflect the difference between the buffer and the staged contents, and
// when the index is rewritten the diff recomputes against the new base.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // Seed the fake repo's index so it differs from the working copy.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Buffer vs. index: one added comment line and one modified println.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Rewrite the index so it now contains the comment but not the println.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    // The diff recomputes against the new base: only the println remains
    // unstaged, now as an addition.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6586
// Verifies the uncommitted diff (buffer vs. HEAD), including the secondary
// (staged-ness) status of each hunk: a hunk staged in the index shows as
// having no secondary hunk, an unstaged one does. Also covers a deleted
// file's buffer before and after its deletion is staged.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index both also contain deletion.rs, which is absent from
    // the working tree (i.e. the file has been deleted but not staged).
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (HEAD content) picks up the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                // The comment line is not in the index: still has a
                // secondary (unstaged) hunk.
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                // The println change is already staged: no secondary hunk.
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as one deleted hunk; the deletion is not yet
    // staged, so the secondary hunk is present.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    // (rewriting the index without deletion.rs removes it from the index).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // Same deleted hunk, but now with no secondary hunk: fully staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6766
/// Exercises staging individual diff hunks through an uncommitted diff:
/// the optimistic pending state shown before the git index write lands
/// (`SecondaryHunkRemovalPending`), confirmation once the write completes,
/// rollback to unstaged when the index write fails, and two staging
/// operations issued back-to-back before either write finishes.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index start out identical; the working copy deletes "zero"
    // and rewrites "two" and "four", producing three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so we can assert on the exact event sequence.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The index write hasn't completed yet, so the "two" hunk is shown
        // as pending rather than fully staged.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // As before, the hunk shows as pending until the write resolves.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7106
/// Like `test_staging_hunks`, but with file-system event delivery paused so
/// that index writes complete while their FS events are still queued. The
/// pending hunk states must survive delayed and partially-flushed event
/// delivery, and all hunks must end up staged once events drain.
// NOTE(review): the explicit seeds presumably pin RNG schedules that once
// reproduced a failure — confirm before changing them.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as `test_staging_hunks`: one deletion and two
    // modifications relative to HEAD/index.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks remain pending: no FS event has been delivered,
        // so the diff has not yet observed the new index contents.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7300
/// Randomized stress test for hunk staging: repeatedly stages/unstages
/// random hunks with random yields in between (sometimes deprioritizing the
/// diff-recalculation task to provoke races with index writes), then checks
/// that every hunk settles into exactly the state that was last requested.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of stage/unstage operations; overridable via the OPERATIONS env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.gen_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line is modified in the buffer, so lines 0,5,10,15,20,25
    // each produce one hunk (6 total, asserted below).
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of expected state: each operation below
    // also records the status we expect the hunk to end up in.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.gen_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Let other tasks (diff recalculation, index writes) interleave.
        for _ in 0..rng.gen_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // All pending operations should have resolved to their final states.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7419
7420#[gpui::test]
7421async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7422 init_test(cx);
7423
7424 let committed_contents = r#"
7425 fn main() {
7426 println!("hello from HEAD");
7427 }
7428 "#
7429 .unindent();
7430 let file_contents = r#"
7431 fn main() {
7432 println!("hello from the working copy");
7433 }
7434 "#
7435 .unindent();
7436
7437 let fs = FakeFs::new(cx.background_executor.clone());
7438 fs.insert_tree(
7439 "/dir",
7440 json!({
7441 ".git": {},
7442 "src": {
7443 "main.rs": file_contents,
7444 }
7445 }),
7446 )
7447 .await;
7448
7449 fs.set_head_for_repo(
7450 Path::new("/dir/.git"),
7451 &[("src/main.rs".into(), committed_contents.clone())],
7452 "deadbeef",
7453 );
7454 fs.set_index_for_repo(
7455 Path::new("/dir/.git"),
7456 &[("src/main.rs".into(), committed_contents.clone())],
7457 );
7458
7459 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7460
7461 let buffer = project
7462 .update(cx, |project, cx| {
7463 project.open_local_buffer("/dir/src/main.rs", cx)
7464 })
7465 .await
7466 .unwrap();
7467 let uncommitted_diff = project
7468 .update(cx, |project, cx| {
7469 project.open_uncommitted_diff(buffer.clone(), cx)
7470 })
7471 .await
7472 .unwrap();
7473
7474 cx.run_until_parked();
7475 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7476 let snapshot = buffer.read(cx).snapshot();
7477 assert_hunks(
7478 uncommitted_diff.hunks(&snapshot, cx),
7479 &snapshot,
7480 &uncommitted_diff.base_text_string().unwrap(),
7481 &[(
7482 1..2,
7483 " println!(\"hello from HEAD\");\n",
7484 " println!(\"hello from the working copy\");\n",
7485 DiffHunkStatus {
7486 kind: DiffHunkStatusKind::Modified,
7487 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7488 },
7489 )],
7490 );
7491 });
7492}
7493
7494#[gpui::test]
7495async fn test_repository_and_path_for_project_path(
7496 background_executor: BackgroundExecutor,
7497 cx: &mut gpui::TestAppContext,
7498) {
7499 init_test(cx);
7500 let fs = FakeFs::new(background_executor);
7501 fs.insert_tree(
7502 path!("/root"),
7503 json!({
7504 "c.txt": "",
7505 "dir1": {
7506 ".git": {},
7507 "deps": {
7508 "dep1": {
7509 ".git": {},
7510 "src": {
7511 "a.txt": ""
7512 }
7513 }
7514 },
7515 "src": {
7516 "b.txt": ""
7517 }
7518 },
7519 }),
7520 )
7521 .await;
7522
7523 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7524 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7525 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7526 project
7527 .update(cx, |project, cx| project.git_scans_complete(cx))
7528 .await;
7529 cx.run_until_parked();
7530
7531 project.read_with(cx, |project, cx| {
7532 let git_store = project.git_store().read(cx);
7533 let pairs = [
7534 ("c.txt", None),
7535 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
7536 (
7537 "dir1/deps/dep1/src/a.txt",
7538 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
7539 ),
7540 ];
7541 let expected = pairs
7542 .iter()
7543 .map(|(path, result)| {
7544 (
7545 path,
7546 result.map(|(repo, repo_path)| {
7547 (Path::new(repo).into(), RepoPath::from(repo_path))
7548 }),
7549 )
7550 })
7551 .collect::<Vec<_>>();
7552 let actual = pairs
7553 .iter()
7554 .map(|(path, _)| {
7555 let project_path = (tree_id, Path::new(path)).into();
7556 let result = maybe!({
7557 let (repo, repo_path) =
7558 git_store.repository_and_path_for_project_path(&project_path, cx)?;
7559 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
7560 });
7561 (path, result)
7562 })
7563 .collect::<Vec<_>>();
7564 pretty_assertions::assert_eq!(expected, actual);
7565 });
7566
7567 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
7568 .await
7569 .unwrap();
7570 cx.run_until_parked();
7571
7572 project.read_with(cx, |project, cx| {
7573 let git_store = project.git_store().read(cx);
7574 assert_eq!(
7575 git_store.repository_and_path_for_project_path(
7576 &(tree_id, Path::new("dir1/src/b.txt")).into(),
7577 cx
7578 ),
7579 None
7580 );
7581 });
7582}
7583
7584#[gpui::test]
7585async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7586 init_test(cx);
7587 let fs = FakeFs::new(cx.background_executor.clone());
7588 fs.insert_tree(
7589 path!("/root"),
7590 json!({
7591 "home": {
7592 ".git": {},
7593 "project": {
7594 "a.txt": "A"
7595 },
7596 },
7597 }),
7598 )
7599 .await;
7600 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7601
7602 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7603 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7604 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7605
7606 project
7607 .update(cx, |project, cx| project.git_scans_complete(cx))
7608 .await;
7609 tree.flush_fs_events(cx).await;
7610
7611 project.read_with(cx, |project, cx| {
7612 let containing = project
7613 .git_store()
7614 .read(cx)
7615 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7616 assert!(containing.is_none());
7617 });
7618
7619 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7620 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7621 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7622 project
7623 .update(cx, |project, cx| project.git_scans_complete(cx))
7624 .await;
7625 tree.flush_fs_events(cx).await;
7626
7627 project.read_with(cx, |project, cx| {
7628 let containing = project
7629 .git_store()
7630 .read(cx)
7631 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7632 assert_eq!(
7633 containing
7634 .unwrap()
7635 .0
7636 .read(cx)
7637 .work_directory_abs_path
7638 .as_ref(),
7639 Path::new(path!("/root/home"))
7640 );
7641 });
7642}
7643
/// End-to-end status test against a real git repository on the real
/// file system: verifies the initial worktree statuses, status updates on
/// file modification, and status updates after commits and deletions.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + real git below, so the executor must be allowed to block.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // b.txt is deliberately left untracked.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; it should gain a
    // worktree-modified status.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit everything that was pending, then delete one tracked and one
    // untracked file.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7773
7774#[gpui::test]
7775async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
7776 init_test(cx);
7777 cx.executor().allow_parking();
7778
7779 let root = TempTree::new(json!({
7780 "project": {
7781 "sub": {},
7782 "a.txt": "",
7783 },
7784 }));
7785
7786 let work_dir = root.path().join("project");
7787 let repo = git_init(work_dir.as_path());
7788 // a.txt exists in HEAD and the working copy but is deleted in the index.
7789 git_add("a.txt", &repo);
7790 git_commit("Initial commit", &repo);
7791 git_remove_index("a.txt".as_ref(), &repo);
7792 // `sub` is a nested git repository.
7793 let _sub = git_init(&work_dir.join("sub"));
7794
7795 let project = Project::test(
7796 Arc::new(RealFs::new(None, cx.executor())),
7797 [root.path()],
7798 cx,
7799 )
7800 .await;
7801
7802 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7803 tree.flush_fs_events(cx).await;
7804 project
7805 .update(cx, |project, cx| project.git_scans_complete(cx))
7806 .await;
7807 cx.executor().run_until_parked();
7808
7809 let repository = project.read_with(cx, |project, cx| {
7810 project
7811 .repositories(cx)
7812 .values()
7813 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
7814 .unwrap()
7815 .clone()
7816 });
7817
7818 repository.read_with(cx, |repository, _cx| {
7819 let entries = repository.cached_status().collect::<Vec<_>>();
7820
7821 // `sub` doesn't appear in our computed statuses.
7822 // a.txt appears with a combined `DA` status.
7823 assert_eq!(
7824 entries,
7825 [StatusEntry {
7826 repo_path: "a.txt".into(),
7827 status: TrackedStatus {
7828 index_status: StatusCode::Deleted,
7829 worktree_status: StatusCode::Added
7830 }
7831 .into(),
7832 }]
7833 )
7834 });
7835}
7836
7837#[gpui::test]
7838async fn test_repository_subfolder_git_status(
7839 executor: gpui::BackgroundExecutor,
7840 cx: &mut gpui::TestAppContext,
7841) {
7842 init_test(cx);
7843
7844 let fs = FakeFs::new(executor);
7845 fs.insert_tree(
7846 path!("/root"),
7847 json!({
7848 "my-repo": {
7849 ".git": {},
7850 "a.txt": "a",
7851 "sub-folder-1": {
7852 "sub-folder-2": {
7853 "c.txt": "cc",
7854 "d": {
7855 "e.txt": "eee"
7856 }
7857 },
7858 }
7859 },
7860 }),
7861 )
7862 .await;
7863
7864 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
7865 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
7866
7867 fs.set_status_for_repo(
7868 path!("/root/my-repo/.git").as_ref(),
7869 &[(E_TXT.as_ref(), FileStatus::Untracked)],
7870 );
7871
7872 let project = Project::test(
7873 fs.clone(),
7874 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
7875 cx,
7876 )
7877 .await;
7878
7879 project
7880 .update(cx, |project, cx| project.git_scans_complete(cx))
7881 .await;
7882 cx.run_until_parked();
7883
7884 let repository = project.read_with(cx, |project, cx| {
7885 project.repositories(cx).values().next().unwrap().clone()
7886 });
7887
7888 // Ensure that the git status is loaded correctly
7889 repository.read_with(cx, |repository, _cx| {
7890 assert_eq!(
7891 repository.work_directory_abs_path,
7892 Path::new(path!("/root/my-repo")).into()
7893 );
7894
7895 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
7896 assert_eq!(
7897 repository.status_for_path(&E_TXT.into()).unwrap().status,
7898 FileStatus::Untracked
7899 );
7900 });
7901
7902 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
7903 project
7904 .update(cx, |project, cx| project.git_scans_complete(cx))
7905 .await;
7906 cx.run_until_parked();
7907
7908 repository.read_with(cx, |repository, _cx| {
7909 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
7910 assert_eq!(repository.status_for_path(&E_TXT.into()), None);
7911 });
7912}
7913
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Disabled via `#[cfg(any())]` (an always-false cfg) until the flakiness is resolved.
// It verifies that merge conflicts from a conflicted cherry-pick are surfaced in
// `repository.merge_conflicts`, and cleared again once the conflict is resolved
// and CHERRY_PICK_HEAD is removed.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + real git, so blocking is permitted.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create conflicting edits to a.txt on two branches, then cherry-pick
    // one onto the other to produce a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is mid-cherry-pick with a conflict.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The conflicted path should now be reported by the repository.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // With the cherry-pick concluded, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
7996
/// Rewriting `.gitignore` should flip which entries are reported as
/// ignored, and staging a newly non-ignored file should surface its Added
/// status.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    // Initially *.txt is ignored, so b.txt is ignored and a.xml is tracked.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // The ignored flags are now swapped, and b.txt shows as Added in the index.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8064
8065// NOTE:
8066// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
8067// a directory which some program has already open.
// This is a limitation of Windows.
8069// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
/// Verifies that when a repository's work directory is renamed on disk, the
/// repository entity follows the rename (its `work_directory_abs_path`
/// updates) and previously observed file statuses remain intact.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // "a" is committed then modified on disk; "b" is never tracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // Same statuses, new work-directory path.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8145
8146// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
8147// you can't rename a directory which some program has already open. This is a
// limitation of Windows. See:
8149// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
/// End-to-end check of git status tracking against a real repository:
/// startup state, working-copy modifications, commits, resets/stashes,
/// deletions, `.gitignore` edits, and renames of untracked directories.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and newly ignore one via a .gitignore edit.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A brand-new nested directory with an untracked file inside.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming the parent directory keeps the file untracked at its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8347
/// Verifies that adding an invisible (non-visible) worktree does not cause
/// additional repositories to be reported — only the repository of the
/// visible worktree should appear, before and after.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Open a single file as an invisible worktree; it lives under the outer
    // /root/dir1 repository, which must still not be reported.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8409
/// Verifies git/ignore state after rescans: tracked files, files ignored by
/// an ancestor `.gitignore` outside the repo, and files inside an ignored
/// directory, both on startup and after new files are created and staged.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file_scan_exclusions so ignored entries are still scanned.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create one file of each kind, staging only the tracked one.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8545
/// Verifies that linked git worktrees (`.git` file pointing at
/// `.git/worktrees/...`) and submodules (`.git` file pointing at
/// `.git/modules/...`) are each discovered as separate repositories, and
/// that git events inside them refresh their status.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories — main, linked worktree, submodule — show up.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
8695
/// Verifies that two project worktrees living inside the same git repository
/// produce a single deduplicated repository entry rather than one per
/// worktree.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Both worktrees are subdirectories of the same repo at /root/project.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
8742
8743async fn search(
8744 project: &Entity<Project>,
8745 query: SearchQuery,
8746 cx: &mut gpui::TestAppContext,
8747) -> Result<HashMap<String, Vec<Range<usize>>>> {
8748 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8749 let mut results = HashMap::default();
8750 while let Ok(search_result) = search_rx.recv().await {
8751 match search_result {
8752 SearchResult::Buffer { buffer, ranges } => {
8753 results.entry(buffer).or_insert(ranges);
8754 }
8755 SearchResult::LimitReached => {}
8756 }
8757 }
8758 Ok(results
8759 .into_iter()
8760 .map(|(buffer, ranges)| {
8761 buffer.update(cx, |buffer, cx| {
8762 let path = buffer
8763 .file()
8764 .unwrap()
8765 .full_path(cx)
8766 .to_string_lossy()
8767 .to_string();
8768 let ranges = ranges
8769 .into_iter()
8770 .map(|range| range.to_offset(buffer))
8771 .collect::<Vec<_>>();
8772 (path, ranges)
8773 })
8774 })
8775 .collect())
8776}
8777
8778pub fn init_test(cx: &mut gpui::TestAppContext) {
8779 zlog::init_test();
8780
8781 cx.update(|cx| {
8782 let settings_store = SettingsStore::test(cx);
8783 cx.set_global(settings_store);
8784 release_channel::init(SemanticVersion::default(), cx);
8785 language::init(cx);
8786 Project::init_settings(cx);
8787 });
8788}
8789
8790fn json_lang() -> Arc<Language> {
8791 Arc::new(Language::new(
8792 LanguageConfig {
8793 name: "JSON".into(),
8794 matcher: LanguageMatcher {
8795 path_suffixes: vec!["json".to_string()],
8796 ..Default::default()
8797 },
8798 ..Default::default()
8799 },
8800 None,
8801 ))
8802}
8803
8804fn js_lang() -> Arc<Language> {
8805 Arc::new(Language::new(
8806 LanguageConfig {
8807 name: "JavaScript".into(),
8808 matcher: LanguageMatcher {
8809 path_suffixes: vec!["js".to_string()],
8810 ..Default::default()
8811 },
8812 ..Default::default()
8813 },
8814 None,
8815 ))
8816}
8817
8818fn rust_lang() -> Arc<Language> {
8819 Arc::new(Language::new(
8820 LanguageConfig {
8821 name: "Rust".into(),
8822 matcher: LanguageMatcher {
8823 path_suffixes: vec!["rs".to_string()],
8824 ..Default::default()
8825 },
8826 ..Default::default()
8827 },
8828 Some(tree_sitter_rust::LANGUAGE.into()),
8829 ))
8830}
8831
8832fn typescript_lang() -> Arc<Language> {
8833 Arc::new(Language::new(
8834 LanguageConfig {
8835 name: "TypeScript".into(),
8836 matcher: LanguageMatcher {
8837 path_suffixes: vec!["ts".to_string()],
8838 ..Default::default()
8839 },
8840 ..Default::default()
8841 },
8842 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8843 ))
8844}
8845
8846fn tsx_lang() -> Arc<Language> {
8847 Arc::new(Language::new(
8848 LanguageConfig {
8849 name: "tsx".into(),
8850 matcher: LanguageMatcher {
8851 path_suffixes: vec!["tsx".to_string()],
8852 ..Default::default()
8853 },
8854 ..Default::default()
8855 },
8856 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8857 ))
8858}
8859
8860fn get_all_tasks(
8861 project: &Entity<Project>,
8862 task_contexts: Arc<TaskContexts>,
8863 cx: &mut App,
8864) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
8865 let new_tasks = project.update(cx, |project, cx| {
8866 project.task_store.update(cx, |task_store, cx| {
8867 task_store.task_inventory().unwrap().update(cx, |this, cx| {
8868 this.used_and_current_resolved_tasks(task_contexts, cx)
8869 })
8870 })
8871 });
8872
8873 cx.background_spawn(async move {
8874 let (mut old, new) = new_tasks.await;
8875 old.extend(new);
8876 old
8877 })
8878}
8879
8880#[track_caller]
8881fn assert_entry_git_state(
8882 tree: &Worktree,
8883 repository: &Repository,
8884 path: &str,
8885 index_status: Option<StatusCode>,
8886 is_ignored: bool,
8887) {
8888 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
8889 let entry = tree
8890 .entry_for_path(path)
8891 .unwrap_or_else(|| panic!("entry {path} not found"));
8892 let status = repository
8893 .status_for_path(&path.into())
8894 .map(|entry| entry.status);
8895 let expected = index_status.map(|index_status| {
8896 TrackedStatus {
8897 index_status,
8898 worktree_status: StatusCode::Unmodified,
8899 }
8900 .into()
8901 });
8902 assert_eq!(
8903 status, expected,
8904 "expected {path} to have git status: {expected:?}"
8905 );
8906 assert_eq!(
8907 entry.is_ignored, is_ignored,
8908 "expected {path} to have is_ignored: {is_ignored}"
8909 );
8910}
8911
8912#[track_caller]
8913fn git_init(path: &Path) -> git2::Repository {
8914 let mut init_opts = RepositoryInitOptions::new();
8915 init_opts.initial_head("main");
8916 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
8917}
8918
8919#[track_caller]
8920fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
8921 let path = path.as_ref();
8922 let mut index = repo.index().expect("Failed to get index");
8923 index.add_path(path).expect("Failed to add file");
8924 index.write().expect("Failed to write index");
8925}
8926
8927#[track_caller]
8928fn git_remove_index(path: &Path, repo: &git2::Repository) {
8929 let mut index = repo.index().expect("Failed to get index");
8930 index.remove_path(path).expect("Failed to add file");
8931 index.write().expect("Failed to write index");
8932}
8933
8934#[track_caller]
8935fn git_commit(msg: &'static str, repo: &git2::Repository) {
8936 use git2::Signature;
8937
8938 let signature = Signature::now("test", "test@zed.dev").unwrap();
8939 let oid = repo.index().unwrap().write_tree().unwrap();
8940 let tree = repo.find_tree(oid).unwrap();
8941 if let Ok(head) = repo.head() {
8942 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
8943
8944 let parent_commit = parent_obj.as_commit().unwrap();
8945
8946 repo.commit(
8947 Some("HEAD"),
8948 &signature,
8949 &signature,
8950 msg,
8951 &tree,
8952 &[parent_commit],
8953 )
8954 .expect("Failed to commit with parent");
8955 } else {
8956 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
8957 .expect("Failed to commit");
8958 }
8959}
8960
/// Cherry-picks `commit` onto the current HEAD.
/// Currently unused: `#[cfg(any())]` compiles this helper out entirely.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
8966
/// Stashes the repository's current working-tree changes under a fixed test
/// author (stash message "N/A", default stash flags).
#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    repo.stash_save(&signature, "N/A", None)
        .expect("Failed to stash");
}
8975
8976#[track_caller]
8977fn git_reset(offset: usize, repo: &git2::Repository) {
8978 let head = repo.head().expect("Couldn't get repo head");
8979 let object = head.peel(git2::ObjectType::Commit).unwrap();
8980 let commit = object.as_commit().unwrap();
8981 let new_head = commit
8982 .parents()
8983 .inspect(|parnet| {
8984 parnet.message();
8985 })
8986 .nth(offset)
8987 .expect("Not enough history");
8988 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
8989 .expect("Could not reset");
8990}
8991
/// Creates branch `name` pointing at the current HEAD commit, without
/// switching to it. Currently unused: `#[cfg(any())]` compiles it out.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Panic message previously said "Failed to commit" — copy-pasted from
    // git_commit; this call creates a branch.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9002
/// Points HEAD at the given refname and checks out its tree.
/// Currently unused: `#[cfg(any())]` compiles this helper out entirely.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9009
/// Returns the repository's status as a map from file path to libgit2
/// status flags. Currently unused: `#[cfg(any())]` compiles it out.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}
9019
/// Verifies `Project::find_project_path` with absolute paths: resolves files
/// in either worktree (including nonexistent files inside a worktree) and
/// returns `None` for paths outside every worktree.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // A path inside a worktree resolves even if no such file exists yet.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}