1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
9 DiffHunkStatusKind, assert_hunks,
10};
11use fs::FakeFs;
12use futures::{StreamExt, future};
13use git::{
14 GitHostingProviderRegistry,
15 repository::RepoPath,
16 status::{StatusCode, TrackedStatus},
17};
18use git2::RepositoryInitOptions;
19use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
20use http_client::Url;
21use language::{
22 Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
23 LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
24 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
25 tree_sitter_rust, tree_sitter_typescript,
26};
27use lsp::{
28 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
29 WillRenameFiles, notification::DidRenameFiles,
30};
31use parking_lot::Mutex;
32use paths::{config_dir, tasks_file};
33use postage::stream::Stream as _;
34use pretty_assertions::{assert_eq, assert_matches};
35use rand::{Rng as _, rngs::StdRng};
36use serde_json::json;
37#[cfg(not(windows))]
38use std::os;
39use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
40use task::{ResolvedTask, TaskContext};
41use unindent::Unindent as _;
42use util::{
43 TryFutureExt as _, assert_set_eq, maybe, path,
44 paths::PathMatcher,
45 separator,
46 test::{TempTree, marked_text_offsets},
47 uri,
48};
49use worktree::WorktreeModelHandle as _;
50
51#[gpui::test]
52async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
53 cx.executor().allow_parking();
54
55 let (tx, mut rx) = futures::channel::mpsc::unbounded();
56 let _thread = std::thread::spawn(move || {
57 #[cfg(not(target_os = "windows"))]
58 std::fs::metadata("/tmp").unwrap();
59 #[cfg(target_os = "windows")]
60 std::fs::metadata("C:/Windows").unwrap();
61 std::thread::sleep(Duration::from_millis(1000));
62 tx.unbounded_send(1).unwrap();
63 });
64 rx.next().await.unwrap();
65}
66
67#[gpui::test]
68async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
69 cx.executor().allow_parking();
70
71 let io_task = smol::unblock(move || {
72 println!("sleeping on thread {:?}", std::thread::current().id());
73 std::thread::sleep(Duration::from_millis(10));
74 1
75 });
76
77 let task = cx.foreground_executor().spawn(async move {
78 io_task.await;
79 });
80
81 task.await;
82}
83
84#[cfg(not(windows))]
85#[gpui::test]
86async fn test_symlinks(cx: &mut gpui::TestAppContext) {
87 init_test(cx);
88 cx.executor().allow_parking();
89
90 let dir = TempTree::new(json!({
91 "root": {
92 "apple": "",
93 "banana": {
94 "carrot": {
95 "date": "",
96 "endive": "",
97 }
98 },
99 "fennel": {
100 "grape": "",
101 }
102 }
103 }));
104
105 let root_link_path = dir.path().join("root_link");
106 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
107 os::unix::fs::symlink(
108 dir.path().join("root/fennel"),
109 dir.path().join("root/finnochio"),
110 )
111 .unwrap();
112
113 let project = Project::test(
114 Arc::new(RealFs::new(None, cx.executor())),
115 [root_link_path.as_ref()],
116 cx,
117 )
118 .await;
119
120 project.update(cx, |project, cx| {
121 let tree = project.worktrees(cx).next().unwrap().read(cx);
122 assert_eq!(tree.file_count(), 5);
123 assert_eq!(
124 tree.inode_for_path("fennel/grape"),
125 tree.inode_for_path("finnochio/grape")
126 );
127 });
128}
129
/// Verifies that `.editorconfig` files are discovered and merged with Zed's
/// own `.zed/settings.json`: editorconfig values override project settings,
/// nested `.editorconfig` files override outer ones, and unmatched files
/// fall back to `.zed/settings.json`.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set for *.js, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json is not matched by any .editorconfig glob ("*.rs" / "*.js"),
        // so the tab size from .zed/settings.json applies.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
219
/// Verifies that the `git_hosting_providers` project setting registers a
/// custom provider in the global registry, and that clearing the setting
/// unregisters it again.
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare one custom provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // After settings load, the custom provider should be registered globally.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Overwrite the settings file with an empty object, dropping the provider.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    // The custom provider should have been unregistered.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
284
/// Verifies that nested `.zed/settings.json` and `.zed/tasks.json` files are
/// both picked up: nested settings override outer ones, tasks from each
/// `.zed` directory are listed with worktree-relative source kinds, and
/// scheduling a task plus adding a global tasks file changes the list's
/// contents and ordering (most recently scheduled first, global tasks last).
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // Source kind of the tasks declared in the worktree-root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // The nearest `.zed/settings.json` wins: root settings for
            // `a/a.rs`, the nested `b/.zed` settings for `b/b.rs`.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as scheduled, then register a global tasks.json
    // containing one additional task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task")
;
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The scheduled task now sorts first; the global task appears last.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
489
490#[gpui::test]
491async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
492 init_test(cx);
493 TaskStore::init(None);
494
495 let fs = FakeFs::new(cx.executor());
496 fs.insert_tree(
497 path!("/dir"),
498 json!({
499 ".zed": {
500 "tasks.json": r#"[{
501 "label": "test worktree root",
502 "command": "echo $ZED_WORKTREE_ROOT"
503 }]"#,
504 },
505 "a": {
506 "a.rs": "fn a() {\n A\n}"
507 },
508 }),
509 )
510 .await;
511
512 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
513 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
514
515 cx.executor().run_until_parked();
516 let worktree_id = cx.update(|cx| {
517 project.update(cx, |project, cx| {
518 project.worktrees(cx).next().unwrap().read(cx).id()
519 })
520 });
521
522 let active_non_worktree_item_tasks = cx.update(|cx| {
523 get_all_tasks(
524 &project,
525 &TaskContexts {
526 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
527 active_worktree_context: None,
528 other_worktree_contexts: Vec::new(),
529 lsp_task_sources: HashMap::default(),
530 latest_selection: None,
531 },
532 cx,
533 )
534 });
535 assert!(
536 active_non_worktree_item_tasks.is_empty(),
537 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
538 );
539
540 let active_worktree_tasks = cx.update(|cx| {
541 get_all_tasks(
542 &project,
543 &TaskContexts {
544 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
545 active_worktree_context: Some((worktree_id, {
546 let mut worktree_context = TaskContext::default();
547 worktree_context
548 .task_variables
549 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
550 worktree_context
551 })),
552 other_worktree_contexts: Vec::new(),
553 lsp_task_sources: HashMap::default(),
554 latest_selection: None,
555 },
556 cx,
557 )
558 });
559 assert_eq!(
560 active_worktree_tasks
561 .into_iter()
562 .map(|(source_kind, task)| {
563 let resolved = task.resolved;
564 (source_kind, resolved.command)
565 })
566 .collect::<Vec<_>>(),
567 vec![(
568 TaskSourceKind::Worktree {
569 id: worktree_id,
570 directory_in_worktree: PathBuf::from(separator!(".zed")),
571 id_base: if cfg!(windows) {
572 "local worktree tasks from directory \".zed\"".into()
573 } else {
574 "local worktree tasks from directory \".zed\"".into()
575 },
576 },
577 "echo /dir".to_string(),
578 )]
579 );
580}
581
/// Verifies the lifecycle of buffers relative to language servers: servers
/// start when a matching buffer opens, buffers receive server capabilities,
/// edits/saves/renames are routed only to the matching servers, diagnostics
/// are cleared when a rename changes the buffer's language, restarts reopen
/// documents on the new servers, and dropping a buffer's handle sends a close
/// notification.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server: completion triggers "." and "::", save notifications on.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server: completion trigger ":", save notifications on.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can check it is cleared on language change below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the restart completes.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
983
984#[gpui::test]
985async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
986 init_test(cx);
987
988 let fs = FakeFs::new(cx.executor());
989 fs.insert_tree(
990 path!("/the-root"),
991 json!({
992 ".gitignore": "target\n",
993 "Cargo.lock": "",
994 "src": {
995 "a.rs": "",
996 "b.rs": "",
997 },
998 "target": {
999 "x": {
1000 "out": {
1001 "x.rs": ""
1002 }
1003 },
1004 "y": {
1005 "out": {
1006 "y.rs": "",
1007 }
1008 },
1009 "z": {
1010 "out": {
1011 "z.rs": ""
1012 }
1013 }
1014 }
1015 }),
1016 )
1017 .await;
1018 fs.insert_tree(
1019 path!("/the-registry"),
1020 json!({
1021 "dep1": {
1022 "src": {
1023 "dep1.rs": "",
1024 }
1025 },
1026 "dep2": {
1027 "src": {
1028 "dep2.rs": "",
1029 }
1030 },
1031 }),
1032 )
1033 .await;
1034 fs.insert_tree(
1035 path!("/the/stdlib"),
1036 json!({
1037 "LICENSE": "",
1038 "src": {
1039 "string.rs": "",
1040 }
1041 }),
1042 )
1043 .await;
1044
1045 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1046 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1047 (project.languages().clone(), project.lsp_store())
1048 });
1049 language_registry.add(rust_lang());
1050 let mut fake_servers = language_registry.register_fake_lsp(
1051 "Rust",
1052 FakeLspAdapter {
1053 name: "the-language-server",
1054 ..Default::default()
1055 },
1056 );
1057
1058 cx.executor().run_until_parked();
1059
1060 // Start the language server by opening a buffer with a compatible file extension.
1061 project
1062 .update(cx, |project, cx| {
1063 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1064 })
1065 .await
1066 .unwrap();
1067
1068 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1069 project.update(cx, |project, cx| {
1070 let worktree = project.worktrees(cx).next().unwrap();
1071 assert_eq!(
1072 worktree
1073 .read(cx)
1074 .snapshot()
1075 .entries(true, 0)
1076 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1077 .collect::<Vec<_>>(),
1078 &[
1079 (Path::new(""), false),
1080 (Path::new(".gitignore"), false),
1081 (Path::new("Cargo.lock"), false),
1082 (Path::new("src"), false),
1083 (Path::new("src/a.rs"), false),
1084 (Path::new("src/b.rs"), false),
1085 (Path::new("target"), true),
1086 ]
1087 );
1088 });
1089
1090 let prev_read_dir_count = fs.read_dir_call_count();
1091
1092 let fake_server = fake_servers.next().await.unwrap();
1093 let (server_id, server_name) = lsp_store.read_with(cx, |lsp_store, _| {
1094 let (id, status) = lsp_store.language_server_statuses().next().unwrap();
1095 (id, LanguageServerName::from(status.name.as_str()))
1096 });
1097
1098 // Simulate jumping to a definition in a dependency outside of the worktree.
1099 let _out_of_worktree_buffer = project
1100 .update(cx, |project, cx| {
1101 project.open_local_buffer_via_lsp(
1102 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1103 server_id,
1104 server_name.clone(),
1105 cx,
1106 )
1107 })
1108 .await
1109 .unwrap();
1110
1111 // Keep track of the FS events reported to the language server.
1112 let file_changes = Arc::new(Mutex::new(Vec::new()));
1113 fake_server
1114 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1115 registrations: vec![lsp::Registration {
1116 id: Default::default(),
1117 method: "workspace/didChangeWatchedFiles".to_string(),
1118 register_options: serde_json::to_value(
1119 lsp::DidChangeWatchedFilesRegistrationOptions {
1120 watchers: vec![
1121 lsp::FileSystemWatcher {
1122 glob_pattern: lsp::GlobPattern::String(
1123 path!("/the-root/Cargo.toml").to_string(),
1124 ),
1125 kind: None,
1126 },
1127 lsp::FileSystemWatcher {
1128 glob_pattern: lsp::GlobPattern::String(
1129 path!("/the-root/src/*.{rs,c}").to_string(),
1130 ),
1131 kind: None,
1132 },
1133 lsp::FileSystemWatcher {
1134 glob_pattern: lsp::GlobPattern::String(
1135 path!("/the-root/target/y/**/*.rs").to_string(),
1136 ),
1137 kind: None,
1138 },
1139 lsp::FileSystemWatcher {
1140 glob_pattern: lsp::GlobPattern::String(
1141 path!("/the/stdlib/src/**/*.rs").to_string(),
1142 ),
1143 kind: None,
1144 },
1145 lsp::FileSystemWatcher {
1146 glob_pattern: lsp::GlobPattern::String(
1147 path!("**/Cargo.lock").to_string(),
1148 ),
1149 kind: None,
1150 },
1151 ],
1152 },
1153 )
1154 .ok(),
1155 }],
1156 })
1157 .await
1158 .into_response()
1159 .unwrap();
1160 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1161 let file_changes = file_changes.clone();
1162 move |params, _| {
1163 let mut file_changes = file_changes.lock();
1164 file_changes.extend(params.changes);
1165 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1166 }
1167 });
1168
1169 cx.executor().run_until_parked();
1170 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1171 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1172
1173 let mut new_watched_paths = fs.watched_paths();
1174 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1175 assert_eq!(
1176 &new_watched_paths,
1177 &[
1178 Path::new(path!("/the-root")),
1179 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1180 Path::new(path!("/the/stdlib/src"))
1181 ]
1182 );
1183
1184 // Now the language server has asked us to watch an ignored directory path,
1185 // so we recursively load it.
1186 project.update(cx, |project, cx| {
1187 let worktree = project.visible_worktrees(cx).next().unwrap();
1188 assert_eq!(
1189 worktree
1190 .read(cx)
1191 .snapshot()
1192 .entries(true, 0)
1193 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1194 .collect::<Vec<_>>(),
1195 &[
1196 (Path::new(""), false),
1197 (Path::new(".gitignore"), false),
1198 (Path::new("Cargo.lock"), false),
1199 (Path::new("src"), false),
1200 (Path::new("src/a.rs"), false),
1201 (Path::new("src/b.rs"), false),
1202 (Path::new("target"), true),
1203 (Path::new("target/x"), true),
1204 (Path::new("target/y"), true),
1205 (Path::new("target/y/out"), true),
1206 (Path::new("target/y/out/y.rs"), true),
1207 (Path::new("target/z"), true),
1208 ]
1209 );
1210 });
1211
1212 // Perform some file system mutations, two of which match the watched patterns,
1213 // and one of which does not.
1214 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1215 .await
1216 .unwrap();
1217 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1218 .await
1219 .unwrap();
1220 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1221 .await
1222 .unwrap();
1223 fs.create_file(
1224 path!("/the-root/target/x/out/x2.rs").as_ref(),
1225 Default::default(),
1226 )
1227 .await
1228 .unwrap();
1229 fs.create_file(
1230 path!("/the-root/target/y/out/y2.rs").as_ref(),
1231 Default::default(),
1232 )
1233 .await
1234 .unwrap();
1235 fs.save(
1236 path!("/the-root/Cargo.lock").as_ref(),
1237 &"".into(),
1238 Default::default(),
1239 )
1240 .await
1241 .unwrap();
1242 fs.save(
1243 path!("/the-stdlib/LICENSE").as_ref(),
1244 &"".into(),
1245 Default::default(),
1246 )
1247 .await
1248 .unwrap();
1249 fs.save(
1250 path!("/the/stdlib/src/string.rs").as_ref(),
1251 &"".into(),
1252 Default::default(),
1253 )
1254 .await
1255 .unwrap();
1256
1257 // The language server receives events for the FS mutations that match its watch patterns.
1258 cx.executor().run_until_parked();
1259 assert_eq!(
1260 &*file_changes.lock(),
1261 &[
1262 lsp::FileEvent {
1263 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1264 typ: lsp::FileChangeType::CHANGED,
1265 },
1266 lsp::FileEvent {
1267 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1268 typ: lsp::FileChangeType::DELETED,
1269 },
1270 lsp::FileEvent {
1271 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1272 typ: lsp::FileChangeType::CREATED,
1273 },
1274 lsp::FileEvent {
1275 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1276 typ: lsp::FileChangeType::CREATED,
1277 },
1278 lsp::FileEvent {
1279 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1280 typ: lsp::FileChangeType::CHANGED,
1281 },
1282 ]
1283 );
1284}
1285
1286#[gpui::test]
1287async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1288 init_test(cx);
1289
1290 let fs = FakeFs::new(cx.executor());
1291 fs.insert_tree(
1292 path!("/dir"),
1293 json!({
1294 "a.rs": "let a = 1;",
1295 "b.rs": "let b = 2;"
1296 }),
1297 )
1298 .await;
1299
1300 let project = Project::test(
1301 fs,
1302 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1303 cx,
1304 )
1305 .await;
1306 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1307
1308 let buffer_a = project
1309 .update(cx, |project, cx| {
1310 project.open_local_buffer(path!("/dir/a.rs"), cx)
1311 })
1312 .await
1313 .unwrap();
1314 let buffer_b = project
1315 .update(cx, |project, cx| {
1316 project.open_local_buffer(path!("/dir/b.rs"), cx)
1317 })
1318 .await
1319 .unwrap();
1320
1321 lsp_store.update(cx, |lsp_store, cx| {
1322 lsp_store
1323 .update_diagnostics(
1324 LanguageServerId(0),
1325 lsp::PublishDiagnosticsParams {
1326 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1327 version: None,
1328 diagnostics: vec![lsp::Diagnostic {
1329 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1330 severity: Some(lsp::DiagnosticSeverity::ERROR),
1331 message: "error 1".to_string(),
1332 ..Default::default()
1333 }],
1334 },
1335 DiagnosticSourceKind::Pushed,
1336 &[],
1337 cx,
1338 )
1339 .unwrap();
1340 lsp_store
1341 .update_diagnostics(
1342 LanguageServerId(0),
1343 lsp::PublishDiagnosticsParams {
1344 uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
1345 version: None,
1346 diagnostics: vec![lsp::Diagnostic {
1347 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1348 severity: Some(DiagnosticSeverity::WARNING),
1349 message: "error 2".to_string(),
1350 ..Default::default()
1351 }],
1352 },
1353 DiagnosticSourceKind::Pushed,
1354 &[],
1355 cx,
1356 )
1357 .unwrap();
1358 });
1359
1360 buffer_a.update(cx, |buffer, _| {
1361 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1362 assert_eq!(
1363 chunks
1364 .iter()
1365 .map(|(s, d)| (s.as_str(), *d))
1366 .collect::<Vec<_>>(),
1367 &[
1368 ("let ", None),
1369 ("a", Some(DiagnosticSeverity::ERROR)),
1370 (" = 1;", None),
1371 ]
1372 );
1373 });
1374 buffer_b.update(cx, |buffer, _| {
1375 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1376 assert_eq!(
1377 chunks
1378 .iter()
1379 .map(|(s, d)| (s.as_str(), *d))
1380 .collect::<Vec<_>>(),
1381 &[
1382 ("let ", None),
1383 ("b", Some(DiagnosticSeverity::WARNING)),
1384 (" = 2;", None),
1385 ]
1386 );
1387 });
1388}
1389
1390#[gpui::test]
1391async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1392 init_test(cx);
1393
1394 let fs = FakeFs::new(cx.executor());
1395 fs.insert_tree(
1396 path!("/root"),
1397 json!({
1398 "dir": {
1399 ".git": {
1400 "HEAD": "ref: refs/heads/main",
1401 },
1402 ".gitignore": "b.rs",
1403 "a.rs": "let a = 1;",
1404 "b.rs": "let b = 2;",
1405 },
1406 "other.rs": "let b = c;"
1407 }),
1408 )
1409 .await;
1410
1411 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1412 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1413 let (worktree, _) = project
1414 .update(cx, |project, cx| {
1415 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1416 })
1417 .await
1418 .unwrap();
1419 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1420
1421 let (worktree, _) = project
1422 .update(cx, |project, cx| {
1423 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1424 })
1425 .await
1426 .unwrap();
1427 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1428
1429 let server_id = LanguageServerId(0);
1430 lsp_store.update(cx, |lsp_store, cx| {
1431 lsp_store
1432 .update_diagnostics(
1433 server_id,
1434 lsp::PublishDiagnosticsParams {
1435 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1436 version: None,
1437 diagnostics: vec![lsp::Diagnostic {
1438 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1439 severity: Some(lsp::DiagnosticSeverity::ERROR),
1440 message: "unused variable 'b'".to_string(),
1441 ..Default::default()
1442 }],
1443 },
1444 DiagnosticSourceKind::Pushed,
1445 &[],
1446 cx,
1447 )
1448 .unwrap();
1449 lsp_store
1450 .update_diagnostics(
1451 server_id,
1452 lsp::PublishDiagnosticsParams {
1453 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1454 version: None,
1455 diagnostics: vec![lsp::Diagnostic {
1456 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1457 severity: Some(lsp::DiagnosticSeverity::ERROR),
1458 message: "unknown variable 'c'".to_string(),
1459 ..Default::default()
1460 }],
1461 },
1462 DiagnosticSourceKind::Pushed,
1463 &[],
1464 cx,
1465 )
1466 .unwrap();
1467 });
1468
1469 let main_ignored_buffer = project
1470 .update(cx, |project, cx| {
1471 project.open_buffer((main_worktree_id, "b.rs"), cx)
1472 })
1473 .await
1474 .unwrap();
1475 main_ignored_buffer.update(cx, |buffer, _| {
1476 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1477 assert_eq!(
1478 chunks
1479 .iter()
1480 .map(|(s, d)| (s.as_str(), *d))
1481 .collect::<Vec<_>>(),
1482 &[
1483 ("let ", None),
1484 ("b", Some(DiagnosticSeverity::ERROR)),
1485 (" = 2;", None),
1486 ],
1487 "Gigitnored buffers should still get in-buffer diagnostics",
1488 );
1489 });
1490 let other_buffer = project
1491 .update(cx, |project, cx| {
1492 project.open_buffer((other_worktree_id, ""), cx)
1493 })
1494 .await
1495 .unwrap();
1496 other_buffer.update(cx, |buffer, _| {
1497 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1498 assert_eq!(
1499 chunks
1500 .iter()
1501 .map(|(s, d)| (s.as_str(), *d))
1502 .collect::<Vec<_>>(),
1503 &[
1504 ("let b = ", None),
1505 ("c", Some(DiagnosticSeverity::ERROR)),
1506 (";", None),
1507 ],
1508 "Buffers from hidden projects should still get in-buffer diagnostics"
1509 );
1510 });
1511
1512 project.update(cx, |project, cx| {
1513 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1514 assert_eq!(
1515 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1516 vec![(
1517 ProjectPath {
1518 worktree_id: main_worktree_id,
1519 path: Arc::from(Path::new("b.rs")),
1520 },
1521 server_id,
1522 DiagnosticSummary {
1523 error_count: 1,
1524 warning_count: 0,
1525 }
1526 )]
1527 );
1528 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1529 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1530 });
1531}
1532
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies the exact ordered stream of project events emitted while a
    // language server runs disk-based diagnostics, delimited by progress
    // notifications that use the adapter's configured progress token.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Progress reported under `progress_token` is treated as disk-based
    // diagnostics work by this adapter.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress under the disk-based token emits
    // DiskBasedDiagnosticsStarted (after a RefreshInlayHints event).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for a.rs triggers a DiagnosticsUpdated event
    // for that path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress emits DiskBasedDiagnosticsFinished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the buffer snapshot.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // A second identical empty publish must not produce another event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1669
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restarting a language server while its disk-based diagnostics are
    // still in progress must not leave the project stuck reporting the old
    // server as "running diagnostics".
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Progress under `progress_token` marks disk-based diagnostics work.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Note the replacement server gets a fresh id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1756
1757#[gpui::test]
1758async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1759 init_test(cx);
1760
1761 let fs = FakeFs::new(cx.executor());
1762 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
1763
1764 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1765
1766 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1767 language_registry.add(rust_lang());
1768 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1769
1770 let (buffer, _) = project
1771 .update(cx, |project, cx| {
1772 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1773 })
1774 .await
1775 .unwrap();
1776
1777 // Publish diagnostics
1778 let fake_server = fake_servers.next().await.unwrap();
1779 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1780 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1781 version: None,
1782 diagnostics: vec![lsp::Diagnostic {
1783 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1784 severity: Some(lsp::DiagnosticSeverity::ERROR),
1785 message: "the message".to_string(),
1786 ..Default::default()
1787 }],
1788 });
1789
1790 cx.executor().run_until_parked();
1791 buffer.update(cx, |buffer, _| {
1792 assert_eq!(
1793 buffer
1794 .snapshot()
1795 .diagnostics_in_range::<_, usize>(0..1, false)
1796 .map(|entry| entry.diagnostic.message.clone())
1797 .collect::<Vec<_>>(),
1798 ["the message".to_string()]
1799 );
1800 });
1801 project.update(cx, |project, cx| {
1802 assert_eq!(
1803 project.diagnostic_summary(false, cx),
1804 DiagnosticSummary {
1805 error_count: 1,
1806 warning_count: 0,
1807 }
1808 );
1809 });
1810
1811 project.update(cx, |project, cx| {
1812 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1813 });
1814
1815 // The diagnostics are cleared.
1816 cx.executor().run_until_parked();
1817 buffer.update(cx, |buffer, _| {
1818 assert_eq!(
1819 buffer
1820 .snapshot()
1821 .diagnostics_in_range::<_, usize>(0..1, false)
1822 .map(|entry| entry.diagnostic.message.clone())
1823 .collect::<Vec<_>>(),
1824 Vec::<String>::new(),
1825 );
1826 });
1827 project.update(cx, |project, cx| {
1828 assert_eq!(
1829 project.diagnostic_summary(false, cx),
1830 DiagnosticSummary {
1831 error_count: 0,
1832 warning_count: 0,
1833 }
1834 );
1835 });
1836}
1837
1838#[gpui::test]
1839async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1840 init_test(cx);
1841
1842 let fs = FakeFs::new(cx.executor());
1843 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1844
1845 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1846 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1847
1848 language_registry.add(rust_lang());
1849 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1850
1851 let (buffer, _handle) = project
1852 .update(cx, |project, cx| {
1853 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1854 })
1855 .await
1856 .unwrap();
1857
1858 // Before restarting the server, report diagnostics with an unknown buffer version.
1859 let fake_server = fake_servers.next().await.unwrap();
1860 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1861 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1862 version: Some(10000),
1863 diagnostics: Vec::new(),
1864 });
1865 cx.executor().run_until_parked();
1866 project.update(cx, |project, cx| {
1867 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1868 });
1869
1870 let mut fake_server = fake_servers.next().await.unwrap();
1871 let notification = fake_server
1872 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1873 .await
1874 .text_document;
1875 assert_eq!(notification.version, 0);
1876}
1877
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Cancelling language-server work for a buffer must send a
    // WorkDoneProgressCancel only for progress registered as cancellable,
    // skipping the non-cancellable one.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First, a non-cancellable progress under an unrelated token...
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // ...then a cancellable one under the disk-based diagnostics token.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable progress token should be cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1942
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Toggling `enable_language_server` per language must stop and restart
    // only the affected server, leaving the other language's server alone.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The re-enabled Rust server re-opens the Rust buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JavaScript server is told to exit.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2060
2061#[gpui::test(iterations = 3)]
2062async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
2063 init_test(cx);
2064
2065 let text = "
2066 fn a() { A }
2067 fn b() { BB }
2068 fn c() { CCC }
2069 "
2070 .unindent();
2071
2072 let fs = FakeFs::new(cx.executor());
2073 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2074
2075 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2076 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2077
2078 language_registry.add(rust_lang());
2079 let mut fake_servers = language_registry.register_fake_lsp(
2080 "Rust",
2081 FakeLspAdapter {
2082 disk_based_diagnostics_sources: vec!["disk".into()],
2083 ..Default::default()
2084 },
2085 );
2086
2087 let buffer = project
2088 .update(cx, |project, cx| {
2089 project.open_local_buffer(path!("/dir/a.rs"), cx)
2090 })
2091 .await
2092 .unwrap();
2093
2094 let _handle = project.update(cx, |project, cx| {
2095 project.register_buffer_with_language_servers(&buffer, cx)
2096 });
2097
2098 let mut fake_server = fake_servers.next().await.unwrap();
2099 let open_notification = fake_server
2100 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2101 .await;
2102
2103 // Edit the buffer, moving the content down
2104 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2105 let change_notification_1 = fake_server
2106 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2107 .await;
2108 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2109
2110 // Report some diagnostics for the initial version of the buffer
2111 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2112 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2113 version: Some(open_notification.text_document.version),
2114 diagnostics: vec![
2115 lsp::Diagnostic {
2116 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2117 severity: Some(DiagnosticSeverity::ERROR),
2118 message: "undefined variable 'A'".to_string(),
2119 source: Some("disk".to_string()),
2120 ..Default::default()
2121 },
2122 lsp::Diagnostic {
2123 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2124 severity: Some(DiagnosticSeverity::ERROR),
2125 message: "undefined variable 'BB'".to_string(),
2126 source: Some("disk".to_string()),
2127 ..Default::default()
2128 },
2129 lsp::Diagnostic {
2130 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2131 severity: Some(DiagnosticSeverity::ERROR),
2132 source: Some("disk".to_string()),
2133 message: "undefined variable 'CCC'".to_string(),
2134 ..Default::default()
2135 },
2136 ],
2137 });
2138
2139 // The diagnostics have moved down since they were created.
2140 cx.executor().run_until_parked();
2141 buffer.update(cx, |buffer, _| {
2142 assert_eq!(
2143 buffer
2144 .snapshot()
2145 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2146 .collect::<Vec<_>>(),
2147 &[
2148 DiagnosticEntry {
2149 range: Point::new(3, 9)..Point::new(3, 11),
2150 diagnostic: Diagnostic {
2151 source: Some("disk".into()),
2152 severity: DiagnosticSeverity::ERROR,
2153 message: "undefined variable 'BB'".to_string(),
2154 is_disk_based: true,
2155 group_id: 1,
2156 is_primary: true,
2157 source_kind: DiagnosticSourceKind::Pushed,
2158 ..Diagnostic::default()
2159 },
2160 },
2161 DiagnosticEntry {
2162 range: Point::new(4, 9)..Point::new(4, 12),
2163 diagnostic: Diagnostic {
2164 source: Some("disk".into()),
2165 severity: DiagnosticSeverity::ERROR,
2166 message: "undefined variable 'CCC'".to_string(),
2167 is_disk_based: true,
2168 group_id: 2,
2169 is_primary: true,
2170 source_kind: DiagnosticSourceKind::Pushed,
2171 ..Diagnostic::default()
2172 }
2173 }
2174 ]
2175 );
2176 assert_eq!(
2177 chunks_with_diagnostics(buffer, 0..buffer.len()),
2178 [
2179 ("\n\nfn a() { ".to_string(), None),
2180 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2181 (" }\nfn b() { ".to_string(), None),
2182 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2183 (" }\nfn c() { ".to_string(), None),
2184 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2185 (" }\n".to_string(), None),
2186 ]
2187 );
2188 assert_eq!(
2189 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2190 [
2191 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2192 (" }\nfn c() { ".to_string(), None),
2193 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2194 ]
2195 );
2196 });
2197
2198 // Ensure overlapping diagnostics are highlighted correctly.
2199 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2200 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2201 version: Some(open_notification.text_document.version),
2202 diagnostics: vec![
2203 lsp::Diagnostic {
2204 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2205 severity: Some(DiagnosticSeverity::ERROR),
2206 message: "undefined variable 'A'".to_string(),
2207 source: Some("disk".to_string()),
2208 ..Default::default()
2209 },
2210 lsp::Diagnostic {
2211 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2212 severity: Some(DiagnosticSeverity::WARNING),
2213 message: "unreachable statement".to_string(),
2214 source: Some("disk".to_string()),
2215 ..Default::default()
2216 },
2217 ],
2218 });
2219
2220 cx.executor().run_until_parked();
2221 buffer.update(cx, |buffer, _| {
2222 assert_eq!(
2223 buffer
2224 .snapshot()
2225 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2226 .collect::<Vec<_>>(),
2227 &[
2228 DiagnosticEntry {
2229 range: Point::new(2, 9)..Point::new(2, 12),
2230 diagnostic: Diagnostic {
2231 source: Some("disk".into()),
2232 severity: DiagnosticSeverity::WARNING,
2233 message: "unreachable statement".to_string(),
2234 is_disk_based: true,
2235 group_id: 4,
2236 is_primary: true,
2237 source_kind: DiagnosticSourceKind::Pushed,
2238 ..Diagnostic::default()
2239 }
2240 },
2241 DiagnosticEntry {
2242 range: Point::new(2, 9)..Point::new(2, 10),
2243 diagnostic: Diagnostic {
2244 source: Some("disk".into()),
2245 severity: DiagnosticSeverity::ERROR,
2246 message: "undefined variable 'A'".to_string(),
2247 is_disk_based: true,
2248 group_id: 3,
2249 is_primary: true,
2250 source_kind: DiagnosticSourceKind::Pushed,
2251 ..Diagnostic::default()
2252 },
2253 }
2254 ]
2255 );
2256 assert_eq!(
2257 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2258 [
2259 ("fn a() { ".to_string(), None),
2260 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2261 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2262 ("\n".to_string(), None),
2263 ]
2264 );
2265 assert_eq!(
2266 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2267 [
2268 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2269 ("\n".to_string(), None),
2270 ]
2271 );
2272 });
2273
2274 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2275 // changes since the last save.
2276 buffer.update(cx, |buffer, cx| {
2277 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2278 buffer.edit(
2279 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2280 None,
2281 cx,
2282 );
2283 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2284 });
2285 let change_notification_2 = fake_server
2286 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2287 .await;
2288 assert!(
2289 change_notification_2.text_document.version > change_notification_1.text_document.version
2290 );
2291
2292 // Handle out-of-order diagnostics
2293 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2294 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2295 version: Some(change_notification_2.text_document.version),
2296 diagnostics: vec![
2297 lsp::Diagnostic {
2298 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2299 severity: Some(DiagnosticSeverity::ERROR),
2300 message: "undefined variable 'BB'".to_string(),
2301 source: Some("disk".to_string()),
2302 ..Default::default()
2303 },
2304 lsp::Diagnostic {
2305 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2306 severity: Some(DiagnosticSeverity::WARNING),
2307 message: "undefined variable 'A'".to_string(),
2308 source: Some("disk".to_string()),
2309 ..Default::default()
2310 },
2311 ],
2312 });
2313
2314 cx.executor().run_until_parked();
2315 buffer.update(cx, |buffer, _| {
2316 assert_eq!(
2317 buffer
2318 .snapshot()
2319 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2320 .collect::<Vec<_>>(),
2321 &[
2322 DiagnosticEntry {
2323 range: Point::new(2, 21)..Point::new(2, 22),
2324 diagnostic: Diagnostic {
2325 source: Some("disk".into()),
2326 severity: DiagnosticSeverity::WARNING,
2327 message: "undefined variable 'A'".to_string(),
2328 is_disk_based: true,
2329 group_id: 6,
2330 is_primary: true,
2331 source_kind: DiagnosticSourceKind::Pushed,
2332 ..Diagnostic::default()
2333 }
2334 },
2335 DiagnosticEntry {
2336 range: Point::new(3, 9)..Point::new(3, 14),
2337 diagnostic: Diagnostic {
2338 source: Some("disk".into()),
2339 severity: DiagnosticSeverity::ERROR,
2340 message: "undefined variable 'BB'".to_string(),
2341 is_disk_based: true,
2342 group_id: 5,
2343 is_primary: true,
2344 source_kind: DiagnosticSourceKind::Pushed,
2345 ..Diagnostic::default()
2346 },
2347 }
2348 ]
2349 );
2350 });
2351}
2352
2353#[gpui::test]
2354async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2355 init_test(cx);
2356
2357 let text = concat!(
2358 "let one = ;\n", //
2359 "let two = \n",
2360 "let three = 3;\n",
2361 );
2362
2363 let fs = FakeFs::new(cx.executor());
2364 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2365
2366 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2367 let buffer = project
2368 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2369 .await
2370 .unwrap();
2371
2372 project.update(cx, |project, cx| {
2373 project.lsp_store.update(cx, |lsp_store, cx| {
2374 lsp_store
2375 .update_diagnostic_entries(
2376 LanguageServerId(0),
2377 PathBuf::from("/dir/a.rs"),
2378 None,
2379 vec![
2380 DiagnosticEntry {
2381 range: Unclipped(PointUtf16::new(0, 10))
2382 ..Unclipped(PointUtf16::new(0, 10)),
2383 diagnostic: Diagnostic {
2384 severity: DiagnosticSeverity::ERROR,
2385 message: "syntax error 1".to_string(),
2386 source_kind: DiagnosticSourceKind::Pushed,
2387 ..Diagnostic::default()
2388 },
2389 },
2390 DiagnosticEntry {
2391 range: Unclipped(PointUtf16::new(1, 10))
2392 ..Unclipped(PointUtf16::new(1, 10)),
2393 diagnostic: Diagnostic {
2394 severity: DiagnosticSeverity::ERROR,
2395 message: "syntax error 2".to_string(),
2396 source_kind: DiagnosticSourceKind::Pushed,
2397 ..Diagnostic::default()
2398 },
2399 },
2400 ],
2401 cx,
2402 )
2403 .unwrap();
2404 })
2405 });
2406
2407 // An empty range is extended forward to include the following character.
2408 // At the end of a line, an empty range is extended backward to include
2409 // the preceding character.
2410 buffer.update(cx, |buffer, _| {
2411 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2412 assert_eq!(
2413 chunks
2414 .iter()
2415 .map(|(s, d)| (s.as_str(), *d))
2416 .collect::<Vec<_>>(),
2417 &[
2418 ("let one = ", None),
2419 (";", Some(DiagnosticSeverity::ERROR)),
2420 ("\nlet two =", None),
2421 (" ", Some(DiagnosticSeverity::ERROR)),
2422 ("\nlet three = 3;\n", None)
2423 ]
2424 );
2425 });
2426}
2427
2428#[gpui::test]
2429async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2430 init_test(cx);
2431
2432 let fs = FakeFs::new(cx.executor());
2433 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2434 .await;
2435
2436 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2437 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2438
2439 lsp_store.update(cx, |lsp_store, cx| {
2440 lsp_store
2441 .update_diagnostic_entries(
2442 LanguageServerId(0),
2443 Path::new("/dir/a.rs").to_owned(),
2444 None,
2445 vec![DiagnosticEntry {
2446 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2447 diagnostic: Diagnostic {
2448 severity: DiagnosticSeverity::ERROR,
2449 is_primary: true,
2450 message: "syntax error a1".to_string(),
2451 source_kind: DiagnosticSourceKind::Pushed,
2452 ..Diagnostic::default()
2453 },
2454 }],
2455 cx,
2456 )
2457 .unwrap();
2458 lsp_store
2459 .update_diagnostic_entries(
2460 LanguageServerId(1),
2461 Path::new("/dir/a.rs").to_owned(),
2462 None,
2463 vec![DiagnosticEntry {
2464 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2465 diagnostic: Diagnostic {
2466 severity: DiagnosticSeverity::ERROR,
2467 is_primary: true,
2468 message: "syntax error b1".to_string(),
2469 source_kind: DiagnosticSourceKind::Pushed,
2470 ..Diagnostic::default()
2471 },
2472 }],
2473 cx,
2474 )
2475 .unwrap();
2476
2477 assert_eq!(
2478 lsp_store.diagnostic_summary(false, cx),
2479 DiagnosticSummary {
2480 error_count: 2,
2481 warning_count: 0,
2482 }
2483 );
2484 });
2485}
2486
// Verifies that edits computed by a language server against a *past* version
// of the document are transformed through the buffer edits made since that
// version, so they land at the correct locations in the current text.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the edits
    // below will be sent against this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The server's edits are expressed in coordinates of the stale document
    // version captured above; `edits_from_lsp` must translate them forward.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits must preserve the interleaved buffer
    // edits made above while landing the server's changes correctly.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2641
// A small logical change expressed by the server as a huge diff (replace,
// reinsert, delete) must be reduced by `edits_from_lsp` to a minimal set of
// buffer edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large diff above collapses into just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2752
2753#[gpui::test]
2754async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
2755 cx: &mut gpui::TestAppContext,
2756) {
2757 init_test(cx);
2758
2759 let text = "Path()";
2760
2761 let fs = FakeFs::new(cx.executor());
2762 fs.insert_tree(
2763 path!("/dir"),
2764 json!({
2765 "a.rs": text
2766 }),
2767 )
2768 .await;
2769
2770 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2771 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2772 let buffer = project
2773 .update(cx, |project, cx| {
2774 project.open_local_buffer(path!("/dir/a.rs"), cx)
2775 })
2776 .await
2777 .unwrap();
2778
2779 // Simulate the language server sending us a pair of edits at the same location,
2780 // with an insertion following a replacement (which violates the LSP spec).
2781 let edits = lsp_store
2782 .update(cx, |lsp_store, cx| {
2783 lsp_store.as_local_mut().unwrap().edits_from_lsp(
2784 &buffer,
2785 [
2786 lsp::TextEdit {
2787 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
2788 new_text: "Path".into(),
2789 },
2790 lsp::TextEdit {
2791 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2792 new_text: "from path import Path\n\n\n".into(),
2793 },
2794 ],
2795 LanguageServerId(0),
2796 None,
2797 cx,
2798 )
2799 })
2800 .await
2801 .unwrap();
2802
2803 buffer.update(cx, |buffer, cx| {
2804 buffer.edit(edits, None, cx);
2805 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
2806 });
2807}
2808
// Servers sometimes send edits out of order, with inverted ranges, or with
// positions past the end of the document. `edits_from_lsp` must sort and
// normalize them rather than panic or corrupt the buffer.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start lies after end.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) lies past the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The malformed inputs normalize into two well-formed minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2915
2916fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2917 buffer: &Buffer,
2918 range: Range<T>,
2919) -> Vec<(String, Option<DiagnosticSeverity>)> {
2920 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2921 for chunk in buffer.snapshot().chunks(range, true) {
2922 if chunks.last().map_or(false, |prev_chunk| {
2923 prev_chunk.1 == chunk.diagnostic_severity
2924 }) {
2925 chunks.last_mut().unwrap().0.push_str(chunk.text);
2926 } else {
2927 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2928 }
2929 }
2930 chunks
2931}
2932
// Go-to-definition targeting a file outside the project's worktrees should
// load the target via a temporary, *invisible* worktree, which is released
// again once the last reference to the definition result is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` is just a sibling on disk.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Point the definition at `a.rs`, which is outside the project.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While `definition` is alive, `a.rs` appears as an invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path along with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3030
// When a completion item carries a `text_edit`, that edit's range and new
// text must be used verbatim, taking precedence over both `insert_text`
// and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the completion request first; the fake server's handler is
    // installed below and the `.next().await` drives one request through it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with an item whose text_edit replaces the trailing "fqn".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The text_edit's new text and range win over insert_text/label.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3113
// When completion items lack their own `text_edit`, the list-level default
// `edit_range` (`CompletionList.itemDefaults.editRange`) must be applied,
// combined with `insert_text` if present, else falling back to `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text is applied over the default edit_range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no insert_text either, the label itself is inserted.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3249
// When neither the item nor the list defaults supply an edit range, the
// replace range must be inferred from the word (or string fragment) around
// the cursor position.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The replace range covers the word "fqn" preceding the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just inside the closing quote, after "cmp".
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The replace range covers "cmp" inside the string literal.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3355
// Regression test: a completion whose `insert_text` contains bare `\r` or
// `\r\n` line endings must have them normalized to `\n` in the resulting
// `new_text` before it can be applied to a buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Register a fake TypeScript server that advertises completion support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request before installing the handler; the
    // handler's `.next().await` below waits for this request to arrive.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with an insertion text that mixes CR and CRLF line endings.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both `\r` and `\r\n` are normalized to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3423
// An LSP code action may carry no edits, only lazily-resolved command data.
// Applying such an action must resolve it, execute the resulting command, and
// capture any edits the server pushes back (via `workspace/applyEdit` during
// command execution) in the returned project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Fake server advertising code-action resolve support and one command.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action — the one that carries resolve data.
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3565
3566#[gpui::test(iterations = 10)]
3567async fn test_save_file(cx: &mut gpui::TestAppContext) {
3568 init_test(cx);
3569
3570 let fs = FakeFs::new(cx.executor());
3571 fs.insert_tree(
3572 path!("/dir"),
3573 json!({
3574 "file1": "the old contents",
3575 }),
3576 )
3577 .await;
3578
3579 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3580 let buffer = project
3581 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3582 .await
3583 .unwrap();
3584 buffer.update(cx, |buffer, cx| {
3585 assert_eq!(buffer.text(), "the old contents");
3586 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3587 });
3588
3589 project
3590 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3591 .await
3592 .unwrap();
3593
3594 let new_text = fs
3595 .load(Path::new(path!("/dir/file1")))
3596 .await
3597 .unwrap()
3598 .replace("\r\n", "\n");
3599 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3600}
3601
// Saving an untitled buffer under a name with a recognized extension must
// start the matching language server and register the buffer with it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; with no file, no language server applies yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer under a `.rs` name inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: Arc::from("file.rs".as_ref()),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer is associated with the new server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
3681
3682#[gpui::test(iterations = 30)]
3683async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3684 init_test(cx);
3685
3686 let fs = FakeFs::new(cx.executor().clone());
3687 fs.insert_tree(
3688 path!("/dir"),
3689 json!({
3690 "file1": "the original contents",
3691 }),
3692 )
3693 .await;
3694
3695 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3696 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3697 let buffer = project
3698 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3699 .await
3700 .unwrap();
3701
3702 // Simulate buffer diffs being slow, so that they don't complete before
3703 // the next file change occurs.
3704 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3705
3706 // Change the buffer's file on disk, and then wait for the file change
3707 // to be detected by the worktree, so that the buffer starts reloading.
3708 fs.save(
3709 path!("/dir/file1").as_ref(),
3710 &"the first contents".into(),
3711 Default::default(),
3712 )
3713 .await
3714 .unwrap();
3715 worktree.next_event(cx).await;
3716
3717 // Change the buffer's file again. Depending on the random seed, the
3718 // previous file change may still be in progress.
3719 fs.save(
3720 path!("/dir/file1").as_ref(),
3721 &"the second contents".into(),
3722 Default::default(),
3723 )
3724 .await
3725 .unwrap();
3726 worktree.next_event(cx).await;
3727
3728 cx.executor().run_until_parked();
3729 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3730 buffer.read_with(cx, |buffer, _| {
3731 assert_eq!(buffer.text(), on_disk_text);
3732 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3733 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3734 });
3735}
3736
3737#[gpui::test(iterations = 30)]
3738async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3739 init_test(cx);
3740
3741 let fs = FakeFs::new(cx.executor().clone());
3742 fs.insert_tree(
3743 path!("/dir"),
3744 json!({
3745 "file1": "the original contents",
3746 }),
3747 )
3748 .await;
3749
3750 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3751 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3752 let buffer = project
3753 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3754 .await
3755 .unwrap();
3756
3757 // Simulate buffer diffs being slow, so that they don't complete before
3758 // the next file change occurs.
3759 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3760
3761 // Change the buffer's file on disk, and then wait for the file change
3762 // to be detected by the worktree, so that the buffer starts reloading.
3763 fs.save(
3764 path!("/dir/file1").as_ref(),
3765 &"the first contents".into(),
3766 Default::default(),
3767 )
3768 .await
3769 .unwrap();
3770 worktree.next_event(cx).await;
3771
3772 cx.executor()
3773 .spawn(cx.executor().simulate_random_delay())
3774 .await;
3775
3776 // Perform a noop edit, causing the buffer's version to increase.
3777 buffer.update(cx, |buffer, cx| {
3778 buffer.edit([(0..0, " ")], None, cx);
3779 buffer.undo(cx);
3780 });
3781
3782 cx.executor().run_until_parked();
3783 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3784 buffer.read_with(cx, |buffer, _| {
3785 let buffer_text = buffer.text();
3786 if buffer_text == on_disk_text {
3787 assert!(
3788 !buffer.is_dirty() && !buffer.has_conflict(),
3789 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3790 );
3791 }
3792 // If the file change occurred while the buffer was processing the first
3793 // change, the buffer will be in a conflicting state.
3794 else {
3795 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3796 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3797 }
3798 });
3799}
3800
3801#[gpui::test]
3802async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3803 init_test(cx);
3804
3805 let fs = FakeFs::new(cx.executor());
3806 fs.insert_tree(
3807 path!("/dir"),
3808 json!({
3809 "file1": "the old contents",
3810 }),
3811 )
3812 .await;
3813
3814 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3815 let buffer = project
3816 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3817 .await
3818 .unwrap();
3819 buffer.update(cx, |buffer, cx| {
3820 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3821 });
3822
3823 project
3824 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3825 .await
3826 .unwrap();
3827
3828 let new_text = fs
3829 .load(Path::new(path!("/dir/file1")))
3830 .await
3831 .unwrap()
3832 .replace("\r\n", "\n");
3833 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3834}
3835
3836#[gpui::test]
3837async fn test_save_as(cx: &mut gpui::TestAppContext) {
3838 init_test(cx);
3839
3840 let fs = FakeFs::new(cx.executor());
3841 fs.insert_tree("/dir", json!({})).await;
3842
3843 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3844
3845 let languages = project.update(cx, |project, _| project.languages().clone());
3846 languages.add(rust_lang());
3847
3848 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3849 buffer.update(cx, |buffer, cx| {
3850 buffer.edit([(0..0, "abc")], None, cx);
3851 assert!(buffer.is_dirty());
3852 assert!(!buffer.has_conflict());
3853 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3854 });
3855 project
3856 .update(cx, |project, cx| {
3857 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3858 let path = ProjectPath {
3859 worktree_id,
3860 path: Arc::from(Path::new("file1.rs")),
3861 };
3862 project.save_buffer_as(buffer.clone(), path, cx)
3863 })
3864 .await
3865 .unwrap();
3866 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3867
3868 cx.executor().run_until_parked();
3869 buffer.update(cx, |buffer, cx| {
3870 assert_eq!(
3871 buffer.file().unwrap().full_path(cx),
3872 Path::new("dir/file1.rs")
3873 );
3874 assert!(!buffer.is_dirty());
3875 assert!(!buffer.has_conflict());
3876 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3877 });
3878
3879 let opened_buffer = project
3880 .update(cx, |project, cx| {
3881 project.open_local_buffer("/dir/file1.rs", cx)
3882 })
3883 .await
3884 .unwrap();
3885 assert_eq!(opened_buffer, buffer);
3886}
3887
// Renames and deletions on the real filesystem must preserve worktree entry
// ids and update open buffers' paths; the same updates, streamed to a remote
// worktree replica, must leave it consistent with the local worktree.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits so it can be replayed
    // into the remote replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including renames of ancestor directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers track the renamed paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // ...while a deleted file's buffer keeps its old path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
4053
// Renaming a directory via `rename_entry` preserves the entry ids of both the
// directory and the files inside it, and leaves buffers opened under the old
// path clean.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the parent directory `a` to `b`.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Both the directory and the contained file keep their entry ids, and
    // the open buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
4105
4106#[gpui::test]
4107async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4108 init_test(cx);
4109
4110 let fs = FakeFs::new(cx.executor());
4111 fs.insert_tree(
4112 "/dir",
4113 json!({
4114 "a.txt": "a-contents",
4115 "b.txt": "b-contents",
4116 }),
4117 )
4118 .await;
4119
4120 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4121
4122 // Spawn multiple tasks to open paths, repeating some paths.
4123 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4124 (
4125 p.open_local_buffer("/dir/a.txt", cx),
4126 p.open_local_buffer("/dir/b.txt", cx),
4127 p.open_local_buffer("/dir/a.txt", cx),
4128 )
4129 });
4130
4131 let buffer_a_1 = buffer_a_1.await.unwrap();
4132 let buffer_a_2 = buffer_a_2.await.unwrap();
4133 let buffer_b = buffer_b.await.unwrap();
4134 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4135 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4136
4137 // There is only one buffer per path.
4138 let buffer_a_id = buffer_a_1.entity_id();
4139 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4140
4141 // Open the same path again while it is still open.
4142 drop(buffer_a_1);
4143 let buffer_a_3 = project
4144 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4145 .await
4146 .unwrap();
4147
4148 // There's still only one buffer per path.
4149 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4150}
4151
// Exercises dirty-state tracking end to end: edits mark a buffer dirty and
// emit `DirtyChanged`, saving clears it, manually restoring the saved
// contents clears it, and file deletion interacts with dirtiness without
// emitting spurious events.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Records every non-operation buffer event for the assertions below.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
4333
// When a clean buffer's file changes on disk, the buffer reloads by diffing
// against the new contents (so anchors survive in place); when a dirty
// buffer's file changes, the buffer keeps its edits and is flagged as having
// a conflict instead of reloading.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Place an anchor at each marked offset so we can check that they all
    // survive the diff-based reload below.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4416
// Line endings are detected per-file on load (buffer text is normalized to
// `\n` in memory), updated when the file changes on disk, and re-applied when
// the buffer is saved back.
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();

    // Each buffer's ending is detected from its file; CRLF text is stored
    // normalized to `\n` in memory.
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        path!("/dir/file1").as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
4478
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Tests that pushed LSP diagnostics whose `related_information` cross-reference
    // each other are grouped: hint entries share the `group_id` of their primary
    // diagnostic, and `diagnostic_group` returns each group's entries.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two primary diagnostics ("error 1" WARNING, "error 2" ERROR) plus
    // HINT-severity diagnostics that point back at their primaries via
    // `related_information`, mimicking how rust-analyzer publishes grouped
    // diagnostics.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the pushed diagnostics into the LSP store as if server 0 sent them.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // Entries come back ordered by range. "error 2" and its two hints share
    // group 0; "error 1" and its hint share group 1. Exactly one entry per
    // group is marked `is_primary`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" primary plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" primary plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
4737
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Tests that renaming a worktree entry sends the LSP file-operation
    // messages: a `workspace/willRenameFiles` request (whose returned edit is
    // captured below) followed by a `workspace/didRenameFiles` notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Register interest in rename operations on .rs files and on folders, so
    // the server is eligible to receive will/didRename for "one.rs".
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename before installing the request handler; the handler
    // below then services the pending willRenameFiles request.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // Edit the fake server returns from willRenameFiles; the deliberately odd
    // new_text makes it easy to spot if this edit were ever applied verbatim.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the handler actually produced, so we can assert at the
    // end that the willRenameFiles round-trip really happened.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        // `.next().await` yields once the handler has serviced a request.
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles with
    // the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4866
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Tests symbol rename against a fake LSP server: prepare_rename returns
    // the symbol's range, and perform_rename applies a multi-file
    // WorkspaceEdit, producing one buffer transaction per edited file.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Advertise rename support with prepare_provider so prepare_rename is
    // forwarded to the server.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the request is issued
    // first, then the handler below services it.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The server's LSP range maps back to buffer offsets 6..9 ("ONE").
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server responds with edits spanning both
    // one.rs (the definition) and two.rs (the two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its undo transaction; both
    // buffers now contain the renamed symbol.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5006
5007#[gpui::test]
5008async fn test_search(cx: &mut gpui::TestAppContext) {
5009 init_test(cx);
5010
5011 let fs = FakeFs::new(cx.executor());
5012 fs.insert_tree(
5013 path!("/dir"),
5014 json!({
5015 "one.rs": "const ONE: usize = 1;",
5016 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5017 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5018 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5019 }),
5020 )
5021 .await;
5022 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5023 assert_eq!(
5024 search(
5025 &project,
5026 SearchQuery::text(
5027 "TWO",
5028 false,
5029 true,
5030 false,
5031 Default::default(),
5032 Default::default(),
5033 false,
5034 None
5035 )
5036 .unwrap(),
5037 cx
5038 )
5039 .await
5040 .unwrap(),
5041 HashMap::from_iter([
5042 (separator!("dir/two.rs").to_string(), vec![6..9]),
5043 (separator!("dir/three.rs").to_string(), vec![37..40])
5044 ])
5045 );
5046
5047 let buffer_4 = project
5048 .update(cx, |project, cx| {
5049 project.open_local_buffer(path!("/dir/four.rs"), cx)
5050 })
5051 .await
5052 .unwrap();
5053 buffer_4.update(cx, |buffer, cx| {
5054 let text = "two::TWO";
5055 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5056 });
5057
5058 assert_eq!(
5059 search(
5060 &project,
5061 SearchQuery::text(
5062 "TWO",
5063 false,
5064 true,
5065 false,
5066 Default::default(),
5067 Default::default(),
5068 false,
5069 None,
5070 )
5071 .unwrap(),
5072 cx
5073 )
5074 .await
5075 .unwrap(),
5076 HashMap::from_iter([
5077 (separator!("dir/two.rs").to_string(), vec![6..9]),
5078 (separator!("dir/three.rs").to_string(), vec![37..40]),
5079 (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
5080 ])
5081 );
5082}
5083
5084#[gpui::test]
5085async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
5086 init_test(cx);
5087
5088 let search_query = "file";
5089
5090 let fs = FakeFs::new(cx.executor());
5091 fs.insert_tree(
5092 path!("/dir"),
5093 json!({
5094 "one.rs": r#"// Rust file one"#,
5095 "one.ts": r#"// TypeScript file one"#,
5096 "two.rs": r#"// Rust file two"#,
5097 "two.ts": r#"// TypeScript file two"#,
5098 }),
5099 )
5100 .await;
5101 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5102
5103 assert!(
5104 search(
5105 &project,
5106 SearchQuery::text(
5107 search_query,
5108 false,
5109 true,
5110 false,
5111 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5112 Default::default(),
5113 false,
5114 None
5115 )
5116 .unwrap(),
5117 cx
5118 )
5119 .await
5120 .unwrap()
5121 .is_empty(),
5122 "If no inclusions match, no files should be returned"
5123 );
5124
5125 assert_eq!(
5126 search(
5127 &project,
5128 SearchQuery::text(
5129 search_query,
5130 false,
5131 true,
5132 false,
5133 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5134 Default::default(),
5135 false,
5136 None
5137 )
5138 .unwrap(),
5139 cx
5140 )
5141 .await
5142 .unwrap(),
5143 HashMap::from_iter([
5144 (separator!("dir/one.rs").to_string(), vec![8..12]),
5145 (separator!("dir/two.rs").to_string(), vec![8..12]),
5146 ]),
5147 "Rust only search should give only Rust files"
5148 );
5149
5150 assert_eq!(
5151 search(
5152 &project,
5153 SearchQuery::text(
5154 search_query,
5155 false,
5156 true,
5157 false,
5158 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5159 Default::default(),
5160 false,
5161 None,
5162 )
5163 .unwrap(),
5164 cx
5165 )
5166 .await
5167 .unwrap(),
5168 HashMap::from_iter([
5169 (separator!("dir/one.ts").to_string(), vec![14..18]),
5170 (separator!("dir/two.ts").to_string(), vec![14..18]),
5171 ]),
5172 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
5173 );
5174
5175 assert_eq!(
5176 search(
5177 &project,
5178 SearchQuery::text(
5179 search_query,
5180 false,
5181 true,
5182 false,
5183 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5184 .unwrap(),
5185 Default::default(),
5186 false,
5187 None,
5188 )
5189 .unwrap(),
5190 cx
5191 )
5192 .await
5193 .unwrap(),
5194 HashMap::from_iter([
5195 (separator!("dir/two.ts").to_string(), vec![14..18]),
5196 (separator!("dir/one.rs").to_string(), vec![8..12]),
5197 (separator!("dir/one.ts").to_string(), vec![14..18]),
5198 (separator!("dir/two.rs").to_string(), vec![8..12]),
5199 ]),
5200 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
5201 );
5202}
5203
5204#[gpui::test]
5205async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5206 init_test(cx);
5207
5208 let search_query = "file";
5209
5210 let fs = FakeFs::new(cx.executor());
5211 fs.insert_tree(
5212 path!("/dir"),
5213 json!({
5214 "one.rs": r#"// Rust file one"#,
5215 "one.ts": r#"// TypeScript file one"#,
5216 "two.rs": r#"// Rust file two"#,
5217 "two.ts": r#"// TypeScript file two"#,
5218 }),
5219 )
5220 .await;
5221 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5222
5223 assert_eq!(
5224 search(
5225 &project,
5226 SearchQuery::text(
5227 search_query,
5228 false,
5229 true,
5230 false,
5231 Default::default(),
5232 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5233 false,
5234 None,
5235 )
5236 .unwrap(),
5237 cx
5238 )
5239 .await
5240 .unwrap(),
5241 HashMap::from_iter([
5242 (separator!("dir/one.rs").to_string(), vec![8..12]),
5243 (separator!("dir/one.ts").to_string(), vec![14..18]),
5244 (separator!("dir/two.rs").to_string(), vec![8..12]),
5245 (separator!("dir/two.ts").to_string(), vec![14..18]),
5246 ]),
5247 "If no exclusions match, all files should be returned"
5248 );
5249
5250 assert_eq!(
5251 search(
5252 &project,
5253 SearchQuery::text(
5254 search_query,
5255 false,
5256 true,
5257 false,
5258 Default::default(),
5259 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5260 false,
5261 None,
5262 )
5263 .unwrap(),
5264 cx
5265 )
5266 .await
5267 .unwrap(),
5268 HashMap::from_iter([
5269 (separator!("dir/one.ts").to_string(), vec![14..18]),
5270 (separator!("dir/two.ts").to_string(), vec![14..18]),
5271 ]),
5272 "Rust exclusion search should give only TypeScript files"
5273 );
5274
5275 assert_eq!(
5276 search(
5277 &project,
5278 SearchQuery::text(
5279 search_query,
5280 false,
5281 true,
5282 false,
5283 Default::default(),
5284 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5285 false,
5286 None,
5287 )
5288 .unwrap(),
5289 cx
5290 )
5291 .await
5292 .unwrap(),
5293 HashMap::from_iter([
5294 (separator!("dir/one.rs").to_string(), vec![8..12]),
5295 (separator!("dir/two.rs").to_string(), vec![8..12]),
5296 ]),
5297 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5298 );
5299
5300 assert!(
5301 search(
5302 &project,
5303 SearchQuery::text(
5304 search_query,
5305 false,
5306 true,
5307 false,
5308 Default::default(),
5309 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5310 .unwrap(),
5311 false,
5312 None,
5313 )
5314 .unwrap(),
5315 cx
5316 )
5317 .await
5318 .unwrap()
5319 .is_empty(),
5320 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5321 );
5322}
5323
5324#[gpui::test]
5325async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5326 init_test(cx);
5327
5328 let search_query = "file";
5329
5330 let fs = FakeFs::new(cx.executor());
5331 fs.insert_tree(
5332 path!("/dir"),
5333 json!({
5334 "one.rs": r#"// Rust file one"#,
5335 "one.ts": r#"// TypeScript file one"#,
5336 "two.rs": r#"// Rust file two"#,
5337 "two.ts": r#"// TypeScript file two"#,
5338 }),
5339 )
5340 .await;
5341 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5342
5343 assert!(
5344 search(
5345 &project,
5346 SearchQuery::text(
5347 search_query,
5348 false,
5349 true,
5350 false,
5351 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5352 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5353 false,
5354 None,
5355 )
5356 .unwrap(),
5357 cx
5358 )
5359 .await
5360 .unwrap()
5361 .is_empty(),
5362 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5363 );
5364
5365 assert!(
5366 search(
5367 &project,
5368 SearchQuery::text(
5369 search_query,
5370 false,
5371 true,
5372 false,
5373 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5374 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5375 false,
5376 None,
5377 )
5378 .unwrap(),
5379 cx
5380 )
5381 .await
5382 .unwrap()
5383 .is_empty(),
5384 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5385 );
5386
5387 assert!(
5388 search(
5389 &project,
5390 SearchQuery::text(
5391 search_query,
5392 false,
5393 true,
5394 false,
5395 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5396 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5397 false,
5398 None,
5399 )
5400 .unwrap(),
5401 cx
5402 )
5403 .await
5404 .unwrap()
5405 .is_empty(),
5406 "Non-matching inclusions and exclusions should not change that."
5407 );
5408
5409 assert_eq!(
5410 search(
5411 &project,
5412 SearchQuery::text(
5413 search_query,
5414 false,
5415 true,
5416 false,
5417 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5418 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5419 false,
5420 None,
5421 )
5422 .unwrap(),
5423 cx
5424 )
5425 .await
5426 .unwrap(),
5427 HashMap::from_iter([
5428 (separator!("dir/one.ts").to_string(), vec![14..18]),
5429 (separator!("dir/two.ts").to_string(), vec![14..18]),
5430 ]),
5431 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5432 );
5433}
5434
5435#[gpui::test]
5436async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
5437 init_test(cx);
5438
5439 let fs = FakeFs::new(cx.executor());
5440 fs.insert_tree(
5441 path!("/worktree-a"),
5442 json!({
5443 "haystack.rs": r#"// NEEDLE"#,
5444 "haystack.ts": r#"// NEEDLE"#,
5445 }),
5446 )
5447 .await;
5448 fs.insert_tree(
5449 path!("/worktree-b"),
5450 json!({
5451 "haystack.rs": r#"// NEEDLE"#,
5452 "haystack.ts": r#"// NEEDLE"#,
5453 }),
5454 )
5455 .await;
5456
5457 let project = Project::test(
5458 fs.clone(),
5459 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
5460 cx,
5461 )
5462 .await;
5463
5464 assert_eq!(
5465 search(
5466 &project,
5467 SearchQuery::text(
5468 "NEEDLE",
5469 false,
5470 true,
5471 false,
5472 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
5473 Default::default(),
5474 true,
5475 None,
5476 )
5477 .unwrap(),
5478 cx
5479 )
5480 .await
5481 .unwrap(),
5482 HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
5483 "should only return results from included worktree"
5484 );
5485 assert_eq!(
5486 search(
5487 &project,
5488 SearchQuery::text(
5489 "NEEDLE",
5490 false,
5491 true,
5492 false,
5493 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
5494 Default::default(),
5495 true,
5496 None,
5497 )
5498 .unwrap(),
5499 cx
5500 )
5501 .await
5502 .unwrap(),
5503 HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
5504 "should only return results from included worktree"
5505 );
5506
5507 assert_eq!(
5508 search(
5509 &project,
5510 SearchQuery::text(
5511 "NEEDLE",
5512 false,
5513 true,
5514 false,
5515 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5516 Default::default(),
5517 false,
5518 None,
5519 )
5520 .unwrap(),
5521 cx
5522 )
5523 .await
5524 .unwrap(),
5525 HashMap::from_iter([
5526 (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
5527 (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
5528 ]),
5529 "should return results from both worktrees"
5530 );
5531}
5532
5533#[gpui::test]
5534async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
5535 init_test(cx);
5536
5537 let fs = FakeFs::new(cx.background_executor.clone());
5538 fs.insert_tree(
5539 path!("/dir"),
5540 json!({
5541 ".git": {},
5542 ".gitignore": "**/target\n/node_modules\n",
5543 "target": {
5544 "index.txt": "index_key:index_value"
5545 },
5546 "node_modules": {
5547 "eslint": {
5548 "index.ts": "const eslint_key = 'eslint value'",
5549 "package.json": r#"{ "some_key": "some value" }"#,
5550 },
5551 "prettier": {
5552 "index.ts": "const prettier_key = 'prettier value'",
5553 "package.json": r#"{ "other_key": "other value" }"#,
5554 },
5555 },
5556 "package.json": r#"{ "main_key": "main value" }"#,
5557 }),
5558 )
5559 .await;
5560 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5561
5562 let query = "key";
5563 assert_eq!(
5564 search(
5565 &project,
5566 SearchQuery::text(
5567 query,
5568 false,
5569 false,
5570 false,
5571 Default::default(),
5572 Default::default(),
5573 false,
5574 None,
5575 )
5576 .unwrap(),
5577 cx
5578 )
5579 .await
5580 .unwrap(),
5581 HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
5582 "Only one non-ignored file should have the query"
5583 );
5584
5585 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5586 assert_eq!(
5587 search(
5588 &project,
5589 SearchQuery::text(
5590 query,
5591 false,
5592 false,
5593 true,
5594 Default::default(),
5595 Default::default(),
5596 false,
5597 None,
5598 )
5599 .unwrap(),
5600 cx
5601 )
5602 .await
5603 .unwrap(),
5604 HashMap::from_iter([
5605 (separator!("dir/package.json").to_string(), vec![8..11]),
5606 (separator!("dir/target/index.txt").to_string(), vec![6..9]),
5607 (
5608 separator!("dir/node_modules/prettier/package.json").to_string(),
5609 vec![9..12]
5610 ),
5611 (
5612 separator!("dir/node_modules/prettier/index.ts").to_string(),
5613 vec![15..18]
5614 ),
5615 (
5616 separator!("dir/node_modules/eslint/index.ts").to_string(),
5617 vec![13..16]
5618 ),
5619 (
5620 separator!("dir/node_modules/eslint/package.json").to_string(),
5621 vec![8..11]
5622 ),
5623 ]),
5624 "Unrestricted search with ignored directories should find every file with the query"
5625 );
5626
5627 let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
5628 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
5629 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5630 assert_eq!(
5631 search(
5632 &project,
5633 SearchQuery::text(
5634 query,
5635 false,
5636 false,
5637 true,
5638 files_to_include,
5639 files_to_exclude,
5640 false,
5641 None,
5642 )
5643 .unwrap(),
5644 cx
5645 )
5646 .await
5647 .unwrap(),
5648 HashMap::from_iter([(
5649 separator!("dir/node_modules/prettier/package.json").to_string(),
5650 vec![9..12]
5651 )]),
5652 "With search including ignored prettier directory and excluding TS files, only one file should be found"
5653 );
5654}
5655
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Case-sensitive text search should match only the lowercase occurrences.
    // (The third boolean toggles case sensitivity — compare the two queries.)
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    // Match ranges are byte offsets: "привет" is six 2-byte characters (12 bytes).
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![17..29]),
            (separator!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The same query with case sensitivity off: note it is represented as a
    // Regex query internally (see the assert_matches below), and matches both
    // the uppercase and lowercase spellings in all three files.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (separator!("dir/two.rs").to_string(), vec![3..15]),
            (separator!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A trailing '.' in the query is treated literally, not as a regex
    // wildcard: only "// ПРИВЕТ." in two.rs matches (12 bytes + 1 for '.').
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
5739
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The worktree root is /one/two/three; /one/two/c.rs lies outside of it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;

    // "b.." is an unusual but legal name (it merely contains dots, it is not a
    // '..' path component), so creating it should succeed.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only the legitimate creation ("b..") should have reached the file system.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
5809
5810#[gpui::test]
5811async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
5812 init_test(cx);
5813
5814 let fs = FakeFs::new(cx.executor());
5815 fs.insert_tree(
5816 path!("/dir"),
5817 json!({
5818 "a.tsx": "a",
5819 }),
5820 )
5821 .await;
5822
5823 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5824
5825 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5826 language_registry.add(tsx_lang());
5827 let language_server_names = [
5828 "TypeScriptServer",
5829 "TailwindServer",
5830 "ESLintServer",
5831 "NoHoverCapabilitiesServer",
5832 ];
5833 let mut language_servers = [
5834 language_registry.register_fake_lsp(
5835 "tsx",
5836 FakeLspAdapter {
5837 name: language_server_names[0],
5838 capabilities: lsp::ServerCapabilities {
5839 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5840 ..lsp::ServerCapabilities::default()
5841 },
5842 ..FakeLspAdapter::default()
5843 },
5844 ),
5845 language_registry.register_fake_lsp(
5846 "tsx",
5847 FakeLspAdapter {
5848 name: language_server_names[1],
5849 capabilities: lsp::ServerCapabilities {
5850 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5851 ..lsp::ServerCapabilities::default()
5852 },
5853 ..FakeLspAdapter::default()
5854 },
5855 ),
5856 language_registry.register_fake_lsp(
5857 "tsx",
5858 FakeLspAdapter {
5859 name: language_server_names[2],
5860 capabilities: lsp::ServerCapabilities {
5861 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5862 ..lsp::ServerCapabilities::default()
5863 },
5864 ..FakeLspAdapter::default()
5865 },
5866 ),
5867 language_registry.register_fake_lsp(
5868 "tsx",
5869 FakeLspAdapter {
5870 name: language_server_names[3],
5871 capabilities: lsp::ServerCapabilities {
5872 hover_provider: None,
5873 ..lsp::ServerCapabilities::default()
5874 },
5875 ..FakeLspAdapter::default()
5876 },
5877 ),
5878 ];
5879
5880 let (buffer, _handle) = project
5881 .update(cx, |p, cx| {
5882 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5883 })
5884 .await
5885 .unwrap();
5886 cx.executor().run_until_parked();
5887
5888 let mut servers_with_hover_requests = HashMap::default();
5889 for i in 0..language_server_names.len() {
5890 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
5891 panic!(
5892 "Failed to get language server #{i} with name {}",
5893 &language_server_names[i]
5894 )
5895 });
5896 let new_server_name = new_server.server.name();
5897 assert!(
5898 !servers_with_hover_requests.contains_key(&new_server_name),
5899 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5900 );
5901 match new_server_name.as_ref() {
5902 "TailwindServer" | "TypeScriptServer" => {
5903 servers_with_hover_requests.insert(
5904 new_server_name.clone(),
5905 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5906 move |_, _| {
5907 let name = new_server_name.clone();
5908 async move {
5909 Ok(Some(lsp::Hover {
5910 contents: lsp::HoverContents::Scalar(
5911 lsp::MarkedString::String(format!("{name} hover")),
5912 ),
5913 range: None,
5914 }))
5915 }
5916 },
5917 ),
5918 );
5919 }
5920 "ESLintServer" => {
5921 servers_with_hover_requests.insert(
5922 new_server_name,
5923 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5924 |_, _| async move { Ok(None) },
5925 ),
5926 );
5927 }
5928 "NoHoverCapabilitiesServer" => {
5929 let _never_handled = new_server
5930 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
5931 panic!(
5932 "Should not call for hovers server with no corresponding capabilities"
5933 )
5934 });
5935 }
5936 unexpected => panic!("Unexpected server name: {unexpected}"),
5937 }
5938 }
5939
5940 let hover_task = project.update(cx, |project, cx| {
5941 project.hover(&buffer, Point::new(0, 0), cx)
5942 });
5943 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
5944 |mut hover_request| async move {
5945 hover_request
5946 .next()
5947 .await
5948 .expect("All hover requests should have been triggered")
5949 },
5950 ))
5951 .await;
5952 assert_eq!(
5953 vec!["TailwindServer hover", "TypeScriptServer hover"],
5954 hover_task
5955 .await
5956 .into_iter()
5957 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5958 .sorted()
5959 .collect::<Vec<_>>(),
5960 "Should receive hover responses from all related servers with hover capabilities"
5961 );
5962}
5963
5964#[gpui::test]
5965async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5966 init_test(cx);
5967
5968 let fs = FakeFs::new(cx.executor());
5969 fs.insert_tree(
5970 path!("/dir"),
5971 json!({
5972 "a.ts": "a",
5973 }),
5974 )
5975 .await;
5976
5977 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5978
5979 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5980 language_registry.add(typescript_lang());
5981 let mut fake_language_servers = language_registry.register_fake_lsp(
5982 "TypeScript",
5983 FakeLspAdapter {
5984 capabilities: lsp::ServerCapabilities {
5985 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5986 ..lsp::ServerCapabilities::default()
5987 },
5988 ..FakeLspAdapter::default()
5989 },
5990 );
5991
5992 let (buffer, _handle) = project
5993 .update(cx, |p, cx| {
5994 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5995 })
5996 .await
5997 .unwrap();
5998 cx.executor().run_until_parked();
5999
6000 let fake_server = fake_language_servers
6001 .next()
6002 .await
6003 .expect("failed to get the language server");
6004
6005 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6006 move |_, _| async move {
6007 Ok(Some(lsp::Hover {
6008 contents: lsp::HoverContents::Array(vec![
6009 lsp::MarkedString::String("".to_string()),
6010 lsp::MarkedString::String(" ".to_string()),
6011 lsp::MarkedString::String("\n\n\n".to_string()),
6012 ]),
6013 range: None,
6014 }))
6015 },
6016 );
6017
6018 let hover_task = project.update(cx, |project, cx| {
6019 project.hover(&buffer, Point::new(0, 0), cx)
6020 });
6021 let () = request_handled
6022 .next()
6023 .await
6024 .expect("All hover requests should have been triggered");
6025 assert_eq!(
6026 Vec::<String>::new(),
6027 hover_task
6028 .await
6029 .into_iter()
6030 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6031 .sorted()
6032 .collect::<Vec<_>>(),
6033 "Empty hover parts should be ignored"
6034 );
6035}
6036
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // A single TypeScript server that advertises code action support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server returns two actions of different kinds; only one of them
    // matches the kind filter used in the request below.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only SOURCE_ORGANIZE_IMPORTS actions for the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the action matching the requested kind should survive the filter.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6115
6116#[gpui::test]
6117async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6118 init_test(cx);
6119
6120 let fs = FakeFs::new(cx.executor());
6121 fs.insert_tree(
6122 path!("/dir"),
6123 json!({
6124 "a.tsx": "a",
6125 }),
6126 )
6127 .await;
6128
6129 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6130
6131 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6132 language_registry.add(tsx_lang());
6133 let language_server_names = [
6134 "TypeScriptServer",
6135 "TailwindServer",
6136 "ESLintServer",
6137 "NoActionsCapabilitiesServer",
6138 ];
6139
6140 let mut language_server_rxs = [
6141 language_registry.register_fake_lsp(
6142 "tsx",
6143 FakeLspAdapter {
6144 name: language_server_names[0],
6145 capabilities: lsp::ServerCapabilities {
6146 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6147 ..lsp::ServerCapabilities::default()
6148 },
6149 ..FakeLspAdapter::default()
6150 },
6151 ),
6152 language_registry.register_fake_lsp(
6153 "tsx",
6154 FakeLspAdapter {
6155 name: language_server_names[1],
6156 capabilities: lsp::ServerCapabilities {
6157 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6158 ..lsp::ServerCapabilities::default()
6159 },
6160 ..FakeLspAdapter::default()
6161 },
6162 ),
6163 language_registry.register_fake_lsp(
6164 "tsx",
6165 FakeLspAdapter {
6166 name: language_server_names[2],
6167 capabilities: lsp::ServerCapabilities {
6168 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6169 ..lsp::ServerCapabilities::default()
6170 },
6171 ..FakeLspAdapter::default()
6172 },
6173 ),
6174 language_registry.register_fake_lsp(
6175 "tsx",
6176 FakeLspAdapter {
6177 name: language_server_names[3],
6178 capabilities: lsp::ServerCapabilities {
6179 code_action_provider: None,
6180 ..lsp::ServerCapabilities::default()
6181 },
6182 ..FakeLspAdapter::default()
6183 },
6184 ),
6185 ];
6186
6187 let (buffer, _handle) = project
6188 .update(cx, |p, cx| {
6189 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6190 })
6191 .await
6192 .unwrap();
6193 cx.executor().run_until_parked();
6194
6195 let mut servers_with_actions_requests = HashMap::default();
6196 for i in 0..language_server_names.len() {
6197 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6198 panic!(
6199 "Failed to get language server #{i} with name {}",
6200 &language_server_names[i]
6201 )
6202 });
6203 let new_server_name = new_server.server.name();
6204
6205 assert!(
6206 !servers_with_actions_requests.contains_key(&new_server_name),
6207 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6208 );
6209 match new_server_name.0.as_ref() {
6210 "TailwindServer" | "TypeScriptServer" => {
6211 servers_with_actions_requests.insert(
6212 new_server_name.clone(),
6213 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6214 move |_, _| {
6215 let name = new_server_name.clone();
6216 async move {
6217 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6218 lsp::CodeAction {
6219 title: format!("{name} code action"),
6220 ..lsp::CodeAction::default()
6221 },
6222 )]))
6223 }
6224 },
6225 ),
6226 );
6227 }
6228 "ESLintServer" => {
6229 servers_with_actions_requests.insert(
6230 new_server_name,
6231 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6232 |_, _| async move { Ok(None) },
6233 ),
6234 );
6235 }
6236 "NoActionsCapabilitiesServer" => {
6237 let _never_handled = new_server
6238 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6239 panic!(
6240 "Should not call for code actions server with no corresponding capabilities"
6241 )
6242 });
6243 }
6244 unexpected => panic!("Unexpected server name: {unexpected}"),
6245 }
6246 }
6247
6248 let code_actions_task = project.update(cx, |project, cx| {
6249 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6250 });
6251
6252 // cx.run_until_parked();
6253 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6254 |mut code_actions_request| async move {
6255 code_actions_request
6256 .next()
6257 .await
6258 .expect("All code actions requests should have been triggered")
6259 },
6260 ))
6261 .await;
6262 assert_eq!(
6263 vec!["TailwindServer code action", "TypeScriptServer code action"],
6264 code_actions_task
6265 .await
6266 .unwrap()
6267 .into_iter()
6268 .map(|code_action| code_action.lsp_action.title().to_owned())
6269 .sorted()
6270 .collect::<Vec<_>>(),
6271 "Should receive code actions responses from all related servers with hover capabilities"
6272 );
6273}
6274
6275#[gpui::test]
6276async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6277 init_test(cx);
6278
6279 let fs = FakeFs::new(cx.executor());
6280 fs.insert_tree(
6281 "/dir",
6282 json!({
6283 "a.rs": "let a = 1;",
6284 "b.rs": "let b = 2;",
6285 "c.rs": "let c = 2;",
6286 }),
6287 )
6288 .await;
6289
6290 let project = Project::test(
6291 fs,
6292 [
6293 "/dir/a.rs".as_ref(),
6294 "/dir/b.rs".as_ref(),
6295 "/dir/c.rs".as_ref(),
6296 ],
6297 cx,
6298 )
6299 .await;
6300
6301 // check the initial state and get the worktrees
6302 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6303 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6304 assert_eq!(worktrees.len(), 3);
6305
6306 let worktree_a = worktrees[0].read(cx);
6307 let worktree_b = worktrees[1].read(cx);
6308 let worktree_c = worktrees[2].read(cx);
6309
6310 // check they start in the right order
6311 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6312 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6313 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6314
6315 (
6316 worktrees[0].clone(),
6317 worktrees[1].clone(),
6318 worktrees[2].clone(),
6319 )
6320 });
6321
6322 // move first worktree to after the second
6323 // [a, b, c] -> [b, a, c]
6324 project
6325 .update(cx, |project, cx| {
6326 let first = worktree_a.read(cx);
6327 let second = worktree_b.read(cx);
6328 project.move_worktree(first.id(), second.id(), cx)
6329 })
6330 .expect("moving first after second");
6331
6332 // check the state after moving
6333 project.update(cx, |project, cx| {
6334 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6335 assert_eq!(worktrees.len(), 3);
6336
6337 let first = worktrees[0].read(cx);
6338 let second = worktrees[1].read(cx);
6339 let third = worktrees[2].read(cx);
6340
6341 // check they are now in the right order
6342 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6343 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6344 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6345 });
6346
6347 // move the second worktree to before the first
6348 // [b, a, c] -> [a, b, c]
6349 project
6350 .update(cx, |project, cx| {
6351 let second = worktree_a.read(cx);
6352 let first = worktree_b.read(cx);
6353 project.move_worktree(first.id(), second.id(), cx)
6354 })
6355 .expect("moving second before first");
6356
6357 // check the state after moving
6358 project.update(cx, |project, cx| {
6359 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6360 assert_eq!(worktrees.len(), 3);
6361
6362 let first = worktrees[0].read(cx);
6363 let second = worktrees[1].read(cx);
6364 let third = worktrees[2].read(cx);
6365
6366 // check they are now in the right order
6367 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6368 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6369 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6370 });
6371
6372 // move the second worktree to after the third
6373 // [a, b, c] -> [a, c, b]
6374 project
6375 .update(cx, |project, cx| {
6376 let second = worktree_b.read(cx);
6377 let third = worktree_c.read(cx);
6378 project.move_worktree(second.id(), third.id(), cx)
6379 })
6380 .expect("moving second after third");
6381
6382 // check the state after moving
6383 project.update(cx, |project, cx| {
6384 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6385 assert_eq!(worktrees.len(), 3);
6386
6387 let first = worktrees[0].read(cx);
6388 let second = worktrees[1].read(cx);
6389 let third = worktrees[2].read(cx);
6390
6391 // check they are now in the right order
6392 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6393 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6394 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6395 });
6396
6397 // move the third worktree to before the second
6398 // [a, c, b] -> [a, b, c]
6399 project
6400 .update(cx, |project, cx| {
6401 let third = worktree_c.read(cx);
6402 let second = worktree_b.read(cx);
6403 project.move_worktree(third.id(), second.id(), cx)
6404 })
6405 .expect("moving third before second");
6406
6407 // check the state after moving
6408 project.update(cx, |project, cx| {
6409 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6410 assert_eq!(worktrees.len(), 3);
6411
6412 let first = worktrees[0].read(cx);
6413 let second = worktrees[1].read(cx);
6414 let third = worktrees[2].read(cx);
6415
6416 // check they are now in the right order
6417 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6418 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6419 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6420 });
6421
6422 // move the first worktree to after the third
6423 // [a, b, c] -> [b, c, a]
6424 project
6425 .update(cx, |project, cx| {
6426 let first = worktree_a.read(cx);
6427 let third = worktree_c.read(cx);
6428 project.move_worktree(first.id(), third.id(), cx)
6429 })
6430 .expect("moving first after third");
6431
6432 // check the state after moving
6433 project.update(cx, |project, cx| {
6434 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6435 assert_eq!(worktrees.len(), 3);
6436
6437 let first = worktrees[0].read(cx);
6438 let second = worktrees[1].read(cx);
6439 let third = worktrees[2].read(cx);
6440
6441 // check they are now in the right order
6442 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6443 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6444 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6445 });
6446
6447 // move the third worktree to before the first
6448 // [b, c, a] -> [a, b, c]
6449 project
6450 .update(cx, |project, cx| {
6451 let third = worktree_a.read(cx);
6452 let first = worktree_b.read(cx);
6453 project.move_worktree(third.id(), first.id(), cx)
6454 })
6455 .expect("moving third before first");
6456
6457 // check the state after moving
6458 project.update(cx, |project, cx| {
6459 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6460 assert_eq!(worktrees.len(), 3);
6461
6462 let first = worktrees[0].read(cx);
6463 let second = worktrees[1].read(cx);
6464 let third = worktrees[2].read(cx);
6465
6466 // check they are now in the right order
6467 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6468 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6469 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6470 });
6471}
6472
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The index ("staged") text differs from the working copy: it lacks the
    // leading comment and prints "hello" instead of "goodbye".
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The unstaged diff compares the buffer against the index: one added line
    // and one modified line.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Update the index so that only the println line still differs from the
    // buffer; the diff should recompute to a single added hunk.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6570
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: HEAD, the index, and the working copy.
    // The index stages the println change; the comment is only in the buffer.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index also contain deletion.rs, which is absent from the
    // working tree, so that file reads as an uncommitted deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // Relative to HEAD: the added comment still has a secondary (unstaged)
    // hunk, while the println change is already staged.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file reads as a deletion that is not yet staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // With the file gone from the index, the deletion reads as fully staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6750
6751#[gpui::test]
// End-to-end coverage of staging and unstaging individual hunks in an
// uncommitted diff: the optimistic "pending" secondary status, the events
// emitted while the index write is in flight, rollback when the index write
// fails, and two staging operations issued back-to-back.
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Relative to HEAD, the working copy deletes "zero" and modifies
    // "two"/"four", producing three hunks (one deletion, two modifications).
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index start out identical, so every hunk is unstaged.
    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so the test can assert on their exact order.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The index write has not completed yet, so the hunk shows the
        // pending state rather than NoSecondaryHunk.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The staging is optimistic: the pending state appears even though
        // the index write is going to fail.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7090
// Verifies that hunk staging stays consistent when filesystem events are
// delayed: hunks staged while earlier index-write events are still buffered
// must all end up staged once the events are delivered. The fixed seeds
// reproduce a previously observed interleaving.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as test_staging_hunks: one deleted hunk ("zero") and two
    // modified hunks ("two" -> "TWO", "four" -> "FOUR").
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks remain pending while their events are buffered.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7284
// Randomized test: repeatedly stage/unstage random hunks (with random yields
// in between), tracking the expected final secondary status of each hunk in a
// local model, then check the diff converges to the model once all pending
// index writes settle.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of stage/unstage operations; overridable via the `OPERATIONS`
    // env var for longer soak runs.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.gen_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line is modified in the buffer, yielding six hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the expected-state model: each entry's
    // `secondary_status` is updated to mirror what the diff should report.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.gen_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Randomly yield so operations overlap in-flight index writes.
        for _ in 0..rng.gen_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending state resolves to its terminal
    // state: removal-pending -> staged, addition-pending -> unstaged.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7403
7404#[gpui::test]
7405async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7406 init_test(cx);
7407
7408 let committed_contents = r#"
7409 fn main() {
7410 println!("hello from HEAD");
7411 }
7412 "#
7413 .unindent();
7414 let file_contents = r#"
7415 fn main() {
7416 println!("hello from the working copy");
7417 }
7418 "#
7419 .unindent();
7420
7421 let fs = FakeFs::new(cx.background_executor.clone());
7422 fs.insert_tree(
7423 "/dir",
7424 json!({
7425 ".git": {},
7426 "src": {
7427 "main.rs": file_contents,
7428 }
7429 }),
7430 )
7431 .await;
7432
7433 fs.set_head_for_repo(
7434 Path::new("/dir/.git"),
7435 &[("src/main.rs".into(), committed_contents.clone())],
7436 "deadbeef",
7437 );
7438 fs.set_index_for_repo(
7439 Path::new("/dir/.git"),
7440 &[("src/main.rs".into(), committed_contents.clone())],
7441 );
7442
7443 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7444
7445 let buffer = project
7446 .update(cx, |project, cx| {
7447 project.open_local_buffer("/dir/src/main.rs", cx)
7448 })
7449 .await
7450 .unwrap();
7451 let uncommitted_diff = project
7452 .update(cx, |project, cx| {
7453 project.open_uncommitted_diff(buffer.clone(), cx)
7454 })
7455 .await
7456 .unwrap();
7457
7458 cx.run_until_parked();
7459 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7460 let snapshot = buffer.read(cx).snapshot();
7461 assert_hunks(
7462 uncommitted_diff.hunks(&snapshot, cx),
7463 &snapshot,
7464 &uncommitted_diff.base_text_string().unwrap(),
7465 &[(
7466 1..2,
7467 " println!(\"hello from HEAD\");\n",
7468 " println!(\"hello from the working copy\");\n",
7469 DiffHunkStatus {
7470 kind: DiffHunkStatusKind::Modified,
7471 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7472 },
7473 )],
7474 );
7475 });
7476}
7477
// Checks repository resolution for project paths: files outside any repo map
// to None, files in nested repositories map to the innermost repository, and
// deleting a repository's .git directory removes the mapping.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // `dir1` is a repository containing another repository at
    // `dir1/deps/dep1`; `c.txt` lives outside both.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project-relative path, expected (work dir, repo-relative path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::from(repo_path))
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, Path::new(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer repository's .git directory should unmap its files.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, Path::new("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
7567
7568#[gpui::test]
7569async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7570 init_test(cx);
7571 let fs = FakeFs::new(cx.background_executor.clone());
7572 fs.insert_tree(
7573 path!("/root"),
7574 json!({
7575 "home": {
7576 ".git": {},
7577 "project": {
7578 "a.txt": "A"
7579 },
7580 },
7581 }),
7582 )
7583 .await;
7584 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7585
7586 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7587 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7588 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7589
7590 project
7591 .update(cx, |project, cx| project.git_scans_complete(cx))
7592 .await;
7593 tree.flush_fs_events(cx).await;
7594
7595 project.read_with(cx, |project, cx| {
7596 let containing = project
7597 .git_store()
7598 .read(cx)
7599 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7600 assert!(containing.is_none());
7601 });
7602
7603 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7604 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7605 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7606 project
7607 .update(cx, |project, cx| project.git_scans_complete(cx))
7608 .await;
7609 tree.flush_fs_events(cx).await;
7610
7611 project.read_with(cx, |project, cx| {
7612 let containing = project
7613 .git_store()
7614 .read(cx)
7615 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7616 assert_eq!(
7617 containing
7618 .unwrap()
7619 .0
7620 .read(cx)
7621 .work_directory_abs_path
7622 .as_ref(),
7623 Path::new(path!("/root/home"))
7624 );
7625 });
7626}
7627
// Exercises cached git status against a real repository on disk: the initial
// scan, a worktree modification, a commit that clears statuses, and deletion
// of tracked vs. untracked files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // RealFs requires real file watching, hence parking is allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously unchanged file and confirm it appears in status.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modifications and the deletion; statuses should clear.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7757
// Verifies post-processing of raw git statuses: nested repositories are
// excluded from the parent's status list, and an index-deleted but
// worktree-present file reports a combined Deleted/Added ("DA") status.
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Needed because this test uses RealFs and real git repositories.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer "project" repository (not the nested `sub` one).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
7820
// Verifies that git statuses are resolved correctly when the project worktree
// is a subfolder deep inside a repository's working directory, and that
// clearing the repository's status is reflected in the project.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Paths are relative to the repository root, not the project worktree.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT.as_ref(), FileStatus::Untracked)],
    );

    // Open the project two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            FileStatus::Untracked
        );
    });

    // Clearing the repository's statuses should clear them in the project.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(repository.status_for_path(&E_TXT.into()), None);
    });
}
7897
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Compiled out via `#[cfg(any())]` (an always-false cfg) until the flakiness
// is resolved. It checks that merge conflicts from a conflicted cherry-pick
// are surfaced on the repository, and cleared once the cherry-pick concludes.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses RealFs and real git, so parking must be allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a branch whose change to a.txt conflicts with main's change,
    // then cherry-pick it onto main to induce a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
7980
// Verifies that rewriting .gitignore updates per-entry git state: a file that
// becomes newly ignored is flagged, and a file that becomes un-ignored can be
// staged and reports an Added status.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // a.xml is committed/staged; b.txt matches the initial ignore pattern.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Now a.xml is ignored and b.txt shows as staged (Added).
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8048
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory that some program already has open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    // Verifies that when a repository's work directory is renamed on disk,
    // the repository's recorded abs path follows the rename while per-file
    // git statuses are preserved. Uses a real filesystem and real git repo.
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit "a", then modify it so it reads as Modified; "b" is never added,
    // so it stays Untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Sanity-check initial state before the rename.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the work directory out from under the worktree.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should now report the new path with unchanged statuses.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8129
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory that some program already has open. This is a
// limitation of Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    // End-to-end check of git status tracking against a real repository:
    // initial scan, working-copy edits, commits, resets/stashes, .gitignore
    // edits, and directory creation/renames must all be reflected in the
    // repository's per-path statuses.
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout the test.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so both are untracked.
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        // Committed files no longer carry a status entry.
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and widen the ignore rules, then commit the new
    // .gitignore so f.txt becomes ignored going forward.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // A file created inside a brand-new nested directory should be reported
    // as untracked.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming the parent directory should carry the untracked status over to
    // the file's new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8331
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that adding an invisible (non-user-facing) worktree does not
    // cause repositories from outside the visible worktree to be reported.
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Only the nested repo at dep1 is inside the visible worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add an invisible worktree for a file that lives inside the outer repo
    // (/root/dir1/.git); the repository list must not change.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8393
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies ignored/tracked classification across rescans: files matched
    // by an ancestor .gitignore, by the repo's own .gitignore, and files
    // created after the initial scan.
    init_test(cx);
    // Clear file_scan_exclusions so ignored directories are still scanned.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    // The worktree is rooted at /root/tree, so /root/.gitignore is an
    // ancestor ignore file outside the worktree.
    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        // NOTE(review): the ancestor .gitignore is outside the worktree, and
        // this entry is reported as not ignored here.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files after the initial scan: one staged, one matching the
    // ancestor ignore file, one inside the ignored directory.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8529
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Verifies that linked git worktrees (gitdir files pointing into
    // .git/worktrees) and submodules (gitdir files pointing into
    // .git/modules) are each detected as distinct repositories, and that
    // git events in them refresh statuses.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories (main, linked worktree, submodule) must be found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, not the
    // outer /project repository.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
8679
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    // Two sibling worktrees inside the same git repository must resolve to a
    // single repository entry, not one per worktree.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repo as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository, rooted at the shared parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
8726
8727async fn search(
8728 project: &Entity<Project>,
8729 query: SearchQuery,
8730 cx: &mut gpui::TestAppContext,
8731) -> Result<HashMap<String, Vec<Range<usize>>>> {
8732 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8733 let mut results = HashMap::default();
8734 while let Ok(search_result) = search_rx.recv().await {
8735 match search_result {
8736 SearchResult::Buffer { buffer, ranges } => {
8737 results.entry(buffer).or_insert(ranges);
8738 }
8739 SearchResult::LimitReached => {}
8740 }
8741 }
8742 Ok(results
8743 .into_iter()
8744 .map(|(buffer, ranges)| {
8745 buffer.update(cx, |buffer, cx| {
8746 let path = buffer
8747 .file()
8748 .unwrap()
8749 .full_path(cx)
8750 .to_string_lossy()
8751 .to_string();
8752 let ranges = ranges
8753 .into_iter()
8754 .map(|range| range.to_offset(buffer))
8755 .collect::<Vec<_>>();
8756 (path, ranges)
8757 })
8758 })
8759 .collect())
8760}
8761
/// Shared setup for the tests in this file: initializes test logging, a test
/// `SettingsStore` global, the release channel, language support, and the
/// project's settings.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        // Install the settings store as a global before the other `init`
        // calls run.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
8773
8774fn json_lang() -> Arc<Language> {
8775 Arc::new(Language::new(
8776 LanguageConfig {
8777 name: "JSON".into(),
8778 matcher: LanguageMatcher {
8779 path_suffixes: vec!["json".to_string()],
8780 ..Default::default()
8781 },
8782 ..Default::default()
8783 },
8784 None,
8785 ))
8786}
8787
8788fn js_lang() -> Arc<Language> {
8789 Arc::new(Language::new(
8790 LanguageConfig {
8791 name: "JavaScript".into(),
8792 matcher: LanguageMatcher {
8793 path_suffixes: vec!["js".to_string()],
8794 ..Default::default()
8795 },
8796 ..Default::default()
8797 },
8798 None,
8799 ))
8800}
8801
8802fn rust_lang() -> Arc<Language> {
8803 Arc::new(Language::new(
8804 LanguageConfig {
8805 name: "Rust".into(),
8806 matcher: LanguageMatcher {
8807 path_suffixes: vec!["rs".to_string()],
8808 ..Default::default()
8809 },
8810 ..Default::default()
8811 },
8812 Some(tree_sitter_rust::LANGUAGE.into()),
8813 ))
8814}
8815
8816fn typescript_lang() -> Arc<Language> {
8817 Arc::new(Language::new(
8818 LanguageConfig {
8819 name: "TypeScript".into(),
8820 matcher: LanguageMatcher {
8821 path_suffixes: vec!["ts".to_string()],
8822 ..Default::default()
8823 },
8824 ..Default::default()
8825 },
8826 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8827 ))
8828}
8829
8830fn tsx_lang() -> Arc<Language> {
8831 Arc::new(Language::new(
8832 LanguageConfig {
8833 name: "tsx".into(),
8834 matcher: LanguageMatcher {
8835 path_suffixes: vec!["tsx".to_string()],
8836 ..Default::default()
8837 },
8838 ..Default::default()
8839 },
8840 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8841 ))
8842}
8843
8844fn get_all_tasks(
8845 project: &Entity<Project>,
8846 task_contexts: &TaskContexts,
8847 cx: &mut App,
8848) -> Vec<(TaskSourceKind, ResolvedTask)> {
8849 let (mut old, new) = project.update(cx, |project, cx| {
8850 project
8851 .task_store
8852 .read(cx)
8853 .task_inventory()
8854 .unwrap()
8855 .read(cx)
8856 .used_and_current_resolved_tasks(task_contexts, cx)
8857 });
8858 old.extend(new);
8859 old
8860}
8861
8862#[track_caller]
8863fn assert_entry_git_state(
8864 tree: &Worktree,
8865 repository: &Repository,
8866 path: &str,
8867 index_status: Option<StatusCode>,
8868 is_ignored: bool,
8869) {
8870 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
8871 let entry = tree
8872 .entry_for_path(path)
8873 .unwrap_or_else(|| panic!("entry {path} not found"));
8874 let status = repository
8875 .status_for_path(&path.into())
8876 .map(|entry| entry.status);
8877 let expected = index_status.map(|index_status| {
8878 TrackedStatus {
8879 index_status,
8880 worktree_status: StatusCode::Unmodified,
8881 }
8882 .into()
8883 });
8884 assert_eq!(
8885 status, expected,
8886 "expected {path} to have git status: {expected:?}"
8887 );
8888 assert_eq!(
8889 entry.is_ignored, is_ignored,
8890 "expected {path} to have is_ignored: {is_ignored}"
8891 );
8892}
8893
8894#[track_caller]
8895fn git_init(path: &Path) -> git2::Repository {
8896 let mut init_opts = RepositoryInitOptions::new();
8897 init_opts.initial_head("main");
8898 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
8899}
8900
8901#[track_caller]
8902fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
8903 let path = path.as_ref();
8904 let mut index = repo.index().expect("Failed to get index");
8905 index.add_path(path).expect("Failed to add file");
8906 index.write().expect("Failed to write index");
8907}
8908
8909#[track_caller]
8910fn git_remove_index(path: &Path, repo: &git2::Repository) {
8911 let mut index = repo.index().expect("Failed to get index");
8912 index.remove_path(path).expect("Failed to add file");
8913 index.write().expect("Failed to write index");
8914}
8915
8916#[track_caller]
8917fn git_commit(msg: &'static str, repo: &git2::Repository) {
8918 use git2::Signature;
8919
8920 let signature = Signature::now("test", "test@zed.dev").unwrap();
8921 let oid = repo.index().unwrap().write_tree().unwrap();
8922 let tree = repo.find_tree(oid).unwrap();
8923 if let Ok(head) = repo.head() {
8924 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
8925
8926 let parent_commit = parent_obj.as_commit().unwrap();
8927
8928 repo.commit(
8929 Some("HEAD"),
8930 &signature,
8931 &signature,
8932 msg,
8933 &tree,
8934 &[parent_commit],
8935 )
8936 .expect("Failed to commit with parent");
8937 } else {
8938 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
8939 .expect("Failed to commit");
8940 }
8941}
8942
// Cherry-picks `commit` onto the current HEAD. Compiled out via
// `#[cfg(any())]`; presumably kept around for writing future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
8948
// Stashes all working-directory changes under a placeholder message, using a
// fixed test signature.
#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    repo.stash_save(&signature, "N/A", None)
        .expect("Failed to stash");
}
8957
8958#[track_caller]
8959fn git_reset(offset: usize, repo: &git2::Repository) {
8960 let head = repo.head().expect("Couldn't get repo head");
8961 let object = head.peel(git2::ObjectType::Commit).unwrap();
8962 let commit = object.as_commit().unwrap();
8963 let new_head = commit
8964 .parents()
8965 .inspect(|parnet| {
8966 parnet.message();
8967 })
8968 .nth(offset)
8969 .expect("Not enough history");
8970 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
8971 .expect("Could not reset");
8972}
8973
// Creates branch `name` pointing at the current HEAD commit. Compiled out via
// `#[cfg(any())]`; presumably kept around for writing future tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // The previous expect message here said "Failed to commit", which was
    // misleading for branch creation.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
8984
// Points HEAD at the reference `name` and checks out its contents. Compiled
// out via `#[cfg(any())]`; presumably kept around for writing future tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
8991
// Snapshots the repository's current statuses as a map from path to status
// flags. Compiled out via `#[cfg(any())]`; presumably kept around for
// writing future tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    let mut map = collections::HashMap::default();
    for entry in statuses.iter() {
        map.insert(entry.path().unwrap().to_string(), entry.status());
    }
    map
}
9001
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two separate worktrees so both resolution targets exist.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute path and id for the assertions below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A top-level file resolves to its worktree with a relative path.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        // Nested files keep their subdirectory in the relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        // Paths are resolved against the correct worktree.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // A path inside a worktree resolves even if no such file exists yet.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}