1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
9 DiffHunkStatusKind, assert_hunks,
10};
11use fs::FakeFs;
12use futures::{StreamExt, future};
13use git::{
14 GitHostingProviderRegistry,
15 repository::RepoPath,
16 status::{StatusCode, TrackedStatus},
17};
18use git2::RepositoryInitOptions;
19use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
20use http_client::Url;
21use language::{
22 Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
23 LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
24 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
25 tree_sitter_rust, tree_sitter_typescript,
26};
27use lsp::{
28 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
29 WillRenameFiles, notification::DidRenameFiles,
30};
31use parking_lot::Mutex;
32use paths::{config_dir, tasks_file};
33use postage::stream::Stream as _;
34use pretty_assertions::{assert_eq, assert_matches};
35use rand::{Rng as _, rngs::StdRng};
36use serde_json::json;
37#[cfg(not(windows))]
38use std::os;
39use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
40use task::{ResolvedTask, TaskContext};
41use unindent::Unindent as _;
42use util::{
43 TryFutureExt as _, assert_set_eq, maybe, path,
44 paths::PathMatcher,
45 separator,
46 test::{TempTree, marked_text_offsets},
47 uri,
48};
49use worktree::WorktreeModelHandle as _;
50
51#[gpui::test]
52async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
53 cx.executor().allow_parking();
54
55 let (tx, mut rx) = futures::channel::mpsc::unbounded();
56 let _thread = std::thread::spawn(move || {
57 #[cfg(not(target_os = "windows"))]
58 std::fs::metadata("/tmp").unwrap();
59 #[cfg(target_os = "windows")]
60 std::fs::metadata("C:/Windows").unwrap();
61 std::thread::sleep(Duration::from_millis(1000));
62 tx.unbounded_send(1).unwrap();
63 });
64 rx.next().await.unwrap();
65}
66
67#[gpui::test]
68async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
69 cx.executor().allow_parking();
70
71 let io_task = smol::unblock(move || {
72 println!("sleeping on thread {:?}", std::thread::current().id());
73 std::thread::sleep(Duration::from_millis(10));
74 1
75 });
76
77 let task = cx.foreground_executor().spawn(async move {
78 io_task.await;
79 });
80
81 task.await;
82}
83
84#[cfg(not(windows))]
85#[gpui::test]
86async fn test_symlinks(cx: &mut gpui::TestAppContext) {
87 init_test(cx);
88 cx.executor().allow_parking();
89
90 let dir = TempTree::new(json!({
91 "root": {
92 "apple": "",
93 "banana": {
94 "carrot": {
95 "date": "",
96 "endive": "",
97 }
98 },
99 "fennel": {
100 "grape": "",
101 }
102 }
103 }));
104
105 let root_link_path = dir.path().join("root_link");
106 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
107 os::unix::fs::symlink(
108 dir.path().join("root/fennel"),
109 dir.path().join("root/finnochio"),
110 )
111 .unwrap();
112
113 let project = Project::test(
114 Arc::new(RealFs::new(None, cx.executor())),
115 [root_link_path.as_ref()],
116 cx,
117 )
118 .await;
119
120 project.update(cx, |project, cx| {
121 let tree = project.worktrees(cx).next().unwrap().read(cx);
122 assert_eq!(tree.file_count(), 5);
123 assert_eq!(
124 tree.inode_for_path("fennel/grape"),
125 tree.inode_for_path("finnochio/grape")
126 );
127 });
128}
129
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A worktree containing `.editorconfig` files (one at the root, one nested
    // in `b/`) alongside `.zed/settings.json`; editorconfig settings should
    // take precedence over Zed's project settings where both apply.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    // Mirror the real temp tree into the fake FS so the project can read it.
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a file in the worktree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by the .editorconfig glob "*.rs",
        // so it falls back to the tab_size from .zed/settings.json
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
219
220#[gpui::test]
221async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
222 init_test(cx);
223 cx.update(|cx| {
224 GitHostingProviderRegistry::default_global(cx);
225 git_hosting_providers::init(cx);
226 });
227
228 let fs = FakeFs::new(cx.executor());
229 let str_path = path!("/dir");
230 let path = Path::new(str_path);
231
232 fs.insert_tree(
233 path!("/dir"),
234 json!({
235 ".zed": {
236 "settings.json": r#"{
237 "git_hosting_providers": [
238 {
239 "provider": "gitlab",
240 "base_url": "https://google.com",
241 "name": "foo"
242 }
243 ]
244 }"#
245 },
246 }),
247 )
248 .await;
249
250 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
251 let (_worktree, _) =
252 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
253 cx.executor().run_until_parked();
254
255 cx.update(|cx| {
256 let provider = GitHostingProviderRegistry::global(cx);
257 assert!(
258 provider
259 .list_hosting_providers()
260 .into_iter()
261 .any(|provider| provider.name() == "foo")
262 );
263 });
264
265 fs.atomic_write(
266 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
267 "{}".into(),
268 )
269 .await
270 .unwrap();
271
272 cx.run_until_parked();
273
274 cx.update(|cx| {
275 let provider = GitHostingProviderRegistry::global(cx);
276 assert!(
277 !provider
278 .list_hosting_providers()
279 .into_iter()
280 .any(|provider| provider.name() == "foo")
281 );
282 });
283}
284
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Worktree with a root-level `.zed` directory and a nested one under `b/`,
    // each carrying its own settings.json and tasks.json.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // The task source corresponding to the worktree-root `.zed/tasks.json`.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    // Settings: the nested `.zed` applies to files under `b/`, the root one to
    // everything else. Tasks: both tasks.json files contribute entries.
    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root-level task as recently scheduled and add a global
    // (user-level) tasks.json containing one more task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The scheduled task now sorts first (compare with the previous assert,
    // where it was last), and the new global task appears after the worktree
    // tasks.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
489
490#[gpui::test]
491async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
492 init_test(cx);
493 TaskStore::init(None);
494
495 let fs = FakeFs::new(cx.executor());
496 fs.insert_tree(
497 path!("/dir"),
498 json!({
499 ".zed": {
500 "tasks.json": r#"[{
501 "label": "test worktree root",
502 "command": "echo $ZED_WORKTREE_ROOT"
503 }]"#,
504 },
505 "a": {
506 "a.rs": "fn a() {\n A\n}"
507 },
508 }),
509 )
510 .await;
511
512 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
513 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
514
515 cx.executor().run_until_parked();
516 let worktree_id = cx.update(|cx| {
517 project.update(cx, |project, cx| {
518 project.worktrees(cx).next().unwrap().read(cx).id()
519 })
520 });
521
522 let active_non_worktree_item_tasks = cx.update(|cx| {
523 get_all_tasks(
524 &project,
525 &TaskContexts {
526 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
527 active_worktree_context: None,
528 other_worktree_contexts: Vec::new(),
529 lsp_task_sources: HashMap::default(),
530 latest_selection: None,
531 },
532 cx,
533 )
534 });
535 assert!(
536 active_non_worktree_item_tasks.is_empty(),
537 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
538 );
539
540 let active_worktree_tasks = cx.update(|cx| {
541 get_all_tasks(
542 &project,
543 &TaskContexts {
544 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
545 active_worktree_context: Some((worktree_id, {
546 let mut worktree_context = TaskContext::default();
547 worktree_context
548 .task_variables
549 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
550 worktree_context
551 })),
552 other_worktree_contexts: Vec::new(),
553 lsp_task_sources: HashMap::default(),
554 latest_selection: None,
555 },
556 cx,
557 )
558 });
559 assert_eq!(
560 active_worktree_tasks
561 .into_iter()
562 .map(|(source_kind, task)| {
563 let resolved = task.resolved;
564 (source_kind, resolved.command)
565 })
566 .collect::<Vec<_>>(),
567 vec![(
568 TaskSourceKind::Worktree {
569 id: worktree_id,
570 directory_in_worktree: PathBuf::from(separator!(".zed")),
571 id_base: if cfg!(windows) {
572 "local worktree tasks from directory \".zed\"".into()
573 } else {
574 "local worktree tasks from directory \".zed\"".into()
575 },
576 },
577 "echo /dir".to_string(),
578 )]
579 );
580}
581
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A Rust file, a JSON file, a TOML file with no associated server, and a
    // second Rust file — used to verify which fake servers receive open /
    // change / save / close notifications as buffers are edited and renamed.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON servers with distinct completion triggers so
    // we can tell which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Attach a synthetic diagnostic to the buffer so we can later verify that
    // it is cleared when the buffer's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements
    // start up.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
983
984#[gpui::test]
985async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
986 init_test(cx);
987
988 let fs = FakeFs::new(cx.executor());
989 fs.insert_tree(
990 path!("/the-root"),
991 json!({
992 ".gitignore": "target\n",
993 "Cargo.lock": "",
994 "src": {
995 "a.rs": "",
996 "b.rs": "",
997 },
998 "target": {
999 "x": {
1000 "out": {
1001 "x.rs": ""
1002 }
1003 },
1004 "y": {
1005 "out": {
1006 "y.rs": "",
1007 }
1008 },
1009 "z": {
1010 "out": {
1011 "z.rs": ""
1012 }
1013 }
1014 }
1015 }),
1016 )
1017 .await;
1018 fs.insert_tree(
1019 path!("/the-registry"),
1020 json!({
1021 "dep1": {
1022 "src": {
1023 "dep1.rs": "",
1024 }
1025 },
1026 "dep2": {
1027 "src": {
1028 "dep2.rs": "",
1029 }
1030 },
1031 }),
1032 )
1033 .await;
1034 fs.insert_tree(
1035 path!("/the/stdlib"),
1036 json!({
1037 "LICENSE": "",
1038 "src": {
1039 "string.rs": "",
1040 }
1041 }),
1042 )
1043 .await;
1044
1045 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1046 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1047 (project.languages().clone(), project.lsp_store())
1048 });
1049 language_registry.add(rust_lang());
1050 let mut fake_servers = language_registry.register_fake_lsp(
1051 "Rust",
1052 FakeLspAdapter {
1053 name: "the-language-server",
1054 ..Default::default()
1055 },
1056 );
1057
1058 cx.executor().run_until_parked();
1059
1060 // Start the language server by opening a buffer with a compatible file extension.
1061 project
1062 .update(cx, |project, cx| {
1063 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1064 })
1065 .await
1066 .unwrap();
1067
1068 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1069 project.update(cx, |project, cx| {
1070 let worktree = project.worktrees(cx).next().unwrap();
1071 assert_eq!(
1072 worktree
1073 .read(cx)
1074 .snapshot()
1075 .entries(true, 0)
1076 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1077 .collect::<Vec<_>>(),
1078 &[
1079 (Path::new(""), false),
1080 (Path::new(".gitignore"), false),
1081 (Path::new("Cargo.lock"), false),
1082 (Path::new("src"), false),
1083 (Path::new("src/a.rs"), false),
1084 (Path::new("src/b.rs"), false),
1085 (Path::new("target"), true),
1086 ]
1087 );
1088 });
1089
1090 let prev_read_dir_count = fs.read_dir_call_count();
1091
1092 let fake_server = fake_servers.next().await.unwrap();
1093 let (server_id, server_name) = lsp_store.read_with(cx, |lsp_store, _| {
1094 let (id, status) = lsp_store.language_server_statuses().next().unwrap();
1095 (id, LanguageServerName::from(status.name.as_str()))
1096 });
1097
1098 // Simulate jumping to a definition in a dependency outside of the worktree.
1099 let _out_of_worktree_buffer = project
1100 .update(cx, |project, cx| {
1101 project.open_local_buffer_via_lsp(
1102 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1103 server_id,
1104 server_name.clone(),
1105 cx,
1106 )
1107 })
1108 .await
1109 .unwrap();
1110
1111 // Keep track of the FS events reported to the language server.
1112 let file_changes = Arc::new(Mutex::new(Vec::new()));
1113 fake_server
1114 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1115 registrations: vec![lsp::Registration {
1116 id: Default::default(),
1117 method: "workspace/didChangeWatchedFiles".to_string(),
1118 register_options: serde_json::to_value(
1119 lsp::DidChangeWatchedFilesRegistrationOptions {
1120 watchers: vec![
1121 lsp::FileSystemWatcher {
1122 glob_pattern: lsp::GlobPattern::String(
1123 path!("/the-root/Cargo.toml").to_string(),
1124 ),
1125 kind: None,
1126 },
1127 lsp::FileSystemWatcher {
1128 glob_pattern: lsp::GlobPattern::String(
1129 path!("/the-root/src/*.{rs,c}").to_string(),
1130 ),
1131 kind: None,
1132 },
1133 lsp::FileSystemWatcher {
1134 glob_pattern: lsp::GlobPattern::String(
1135 path!("/the-root/target/y/**/*.rs").to_string(),
1136 ),
1137 kind: None,
1138 },
1139 lsp::FileSystemWatcher {
1140 glob_pattern: lsp::GlobPattern::String(
1141 path!("/the/stdlib/src/**/*.rs").to_string(),
1142 ),
1143 kind: None,
1144 },
1145 lsp::FileSystemWatcher {
1146 glob_pattern: lsp::GlobPattern::String(
1147 path!("**/Cargo.lock").to_string(),
1148 ),
1149 kind: None,
1150 },
1151 ],
1152 },
1153 )
1154 .ok(),
1155 }],
1156 })
1157 .await
1158 .into_response()
1159 .unwrap();
1160 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1161 let file_changes = file_changes.clone();
1162 move |params, _| {
1163 let mut file_changes = file_changes.lock();
1164 file_changes.extend(params.changes);
1165 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1166 }
1167 });
1168
1169 cx.executor().run_until_parked();
1170 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1171 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1172
1173 let mut new_watched_paths = fs.watched_paths();
1174 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1175 assert_eq!(
1176 &new_watched_paths,
1177 &[
1178 Path::new(path!("/the-root")),
1179 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1180 Path::new(path!("/the/stdlib/src"))
1181 ]
1182 );
1183
1184 // Now the language server has asked us to watch an ignored directory path,
1185 // so we recursively load it.
1186 project.update(cx, |project, cx| {
1187 let worktree = project.visible_worktrees(cx).next().unwrap();
1188 assert_eq!(
1189 worktree
1190 .read(cx)
1191 .snapshot()
1192 .entries(true, 0)
1193 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1194 .collect::<Vec<_>>(),
1195 &[
1196 (Path::new(""), false),
1197 (Path::new(".gitignore"), false),
1198 (Path::new("Cargo.lock"), false),
1199 (Path::new("src"), false),
1200 (Path::new("src/a.rs"), false),
1201 (Path::new("src/b.rs"), false),
1202 (Path::new("target"), true),
1203 (Path::new("target/x"), true),
1204 (Path::new("target/y"), true),
1205 (Path::new("target/y/out"), true),
1206 (Path::new("target/y/out/y.rs"), true),
1207 (Path::new("target/z"), true),
1208 ]
1209 );
1210 });
1211
1212 // Perform some file system mutations, two of which match the watched patterns,
1213 // and one of which does not.
1214 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1215 .await
1216 .unwrap();
1217 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1218 .await
1219 .unwrap();
1220 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1221 .await
1222 .unwrap();
1223 fs.create_file(
1224 path!("/the-root/target/x/out/x2.rs").as_ref(),
1225 Default::default(),
1226 )
1227 .await
1228 .unwrap();
1229 fs.create_file(
1230 path!("/the-root/target/y/out/y2.rs").as_ref(),
1231 Default::default(),
1232 )
1233 .await
1234 .unwrap();
1235 fs.save(
1236 path!("/the-root/Cargo.lock").as_ref(),
1237 &"".into(),
1238 Default::default(),
1239 )
1240 .await
1241 .unwrap();
1242 fs.save(
1243 path!("/the-stdlib/LICENSE").as_ref(),
1244 &"".into(),
1245 Default::default(),
1246 )
1247 .await
1248 .unwrap();
1249 fs.save(
1250 path!("/the/stdlib/src/string.rs").as_ref(),
1251 &"".into(),
1252 Default::default(),
1253 )
1254 .await
1255 .unwrap();
1256
1257 // The language server receives events for the FS mutations that match its watch patterns.
1258 cx.executor().run_until_parked();
1259 assert_eq!(
1260 &*file_changes.lock(),
1261 &[
1262 lsp::FileEvent {
1263 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1264 typ: lsp::FileChangeType::CHANGED,
1265 },
1266 lsp::FileEvent {
1267 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1268 typ: lsp::FileChangeType::DELETED,
1269 },
1270 lsp::FileEvent {
1271 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1272 typ: lsp::FileChangeType::CREATED,
1273 },
1274 lsp::FileEvent {
1275 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1276 typ: lsp::FileChangeType::CREATED,
1277 },
1278 lsp::FileEvent {
1279 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1280 typ: lsp::FileChangeType::CHANGED,
1281 },
1282 ]
1283 );
1284}
1285
1286#[gpui::test]
1287async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1288 init_test(cx);
1289
1290 let fs = FakeFs::new(cx.executor());
1291 fs.insert_tree(
1292 path!("/dir"),
1293 json!({
1294 "a.rs": "let a = 1;",
1295 "b.rs": "let b = 2;"
1296 }),
1297 )
1298 .await;
1299
1300 let project = Project::test(
1301 fs,
1302 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1303 cx,
1304 )
1305 .await;
1306 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1307
1308 let buffer_a = project
1309 .update(cx, |project, cx| {
1310 project.open_local_buffer(path!("/dir/a.rs"), cx)
1311 })
1312 .await
1313 .unwrap();
1314 let buffer_b = project
1315 .update(cx, |project, cx| {
1316 project.open_local_buffer(path!("/dir/b.rs"), cx)
1317 })
1318 .await
1319 .unwrap();
1320
1321 lsp_store.update(cx, |lsp_store, cx| {
1322 lsp_store
1323 .update_diagnostics(
1324 LanguageServerId(0),
1325 lsp::PublishDiagnosticsParams {
1326 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1327 version: None,
1328 diagnostics: vec![lsp::Diagnostic {
1329 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1330 severity: Some(lsp::DiagnosticSeverity::ERROR),
1331 message: "error 1".to_string(),
1332 ..Default::default()
1333 }],
1334 },
1335 &[],
1336 cx,
1337 )
1338 .unwrap();
1339 lsp_store
1340 .update_diagnostics(
1341 LanguageServerId(0),
1342 lsp::PublishDiagnosticsParams {
1343 uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
1344 version: None,
1345 diagnostics: vec![lsp::Diagnostic {
1346 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1347 severity: Some(DiagnosticSeverity::WARNING),
1348 message: "error 2".to_string(),
1349 ..Default::default()
1350 }],
1351 },
1352 &[],
1353 cx,
1354 )
1355 .unwrap();
1356 });
1357
1358 buffer_a.update(cx, |buffer, _| {
1359 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1360 assert_eq!(
1361 chunks
1362 .iter()
1363 .map(|(s, d)| (s.as_str(), *d))
1364 .collect::<Vec<_>>(),
1365 &[
1366 ("let ", None),
1367 ("a", Some(DiagnosticSeverity::ERROR)),
1368 (" = 1;", None),
1369 ]
1370 );
1371 });
1372 buffer_b.update(cx, |buffer, _| {
1373 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1374 assert_eq!(
1375 chunks
1376 .iter()
1377 .map(|(s, d)| (s.as_str(), *d))
1378 .collect::<Vec<_>>(),
1379 &[
1380 ("let ", None),
1381 ("b", Some(DiagnosticSeverity::WARNING)),
1382 (" = 2;", None),
1383 ]
1384 );
1385 });
1386}
1387
1388#[gpui::test]
1389async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1390 init_test(cx);
1391
1392 let fs = FakeFs::new(cx.executor());
1393 fs.insert_tree(
1394 path!("/root"),
1395 json!({
1396 "dir": {
1397 ".git": {
1398 "HEAD": "ref: refs/heads/main",
1399 },
1400 ".gitignore": "b.rs",
1401 "a.rs": "let a = 1;",
1402 "b.rs": "let b = 2;",
1403 },
1404 "other.rs": "let b = c;"
1405 }),
1406 )
1407 .await;
1408
1409 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1410 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1411 let (worktree, _) = project
1412 .update(cx, |project, cx| {
1413 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1414 })
1415 .await
1416 .unwrap();
1417 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1418
1419 let (worktree, _) = project
1420 .update(cx, |project, cx| {
1421 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1422 })
1423 .await
1424 .unwrap();
1425 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1426
1427 let server_id = LanguageServerId(0);
1428 lsp_store.update(cx, |lsp_store, cx| {
1429 lsp_store
1430 .update_diagnostics(
1431 server_id,
1432 lsp::PublishDiagnosticsParams {
1433 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1434 version: None,
1435 diagnostics: vec![lsp::Diagnostic {
1436 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1437 severity: Some(lsp::DiagnosticSeverity::ERROR),
1438 message: "unused variable 'b'".to_string(),
1439 ..Default::default()
1440 }],
1441 },
1442 &[],
1443 cx,
1444 )
1445 .unwrap();
1446 lsp_store
1447 .update_diagnostics(
1448 server_id,
1449 lsp::PublishDiagnosticsParams {
1450 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1451 version: None,
1452 diagnostics: vec![lsp::Diagnostic {
1453 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1454 severity: Some(lsp::DiagnosticSeverity::ERROR),
1455 message: "unknown variable 'c'".to_string(),
1456 ..Default::default()
1457 }],
1458 },
1459 &[],
1460 cx,
1461 )
1462 .unwrap();
1463 });
1464
1465 let main_ignored_buffer = project
1466 .update(cx, |project, cx| {
1467 project.open_buffer((main_worktree_id, "b.rs"), cx)
1468 })
1469 .await
1470 .unwrap();
1471 main_ignored_buffer.update(cx, |buffer, _| {
1472 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1473 assert_eq!(
1474 chunks
1475 .iter()
1476 .map(|(s, d)| (s.as_str(), *d))
1477 .collect::<Vec<_>>(),
1478 &[
1479 ("let ", None),
1480 ("b", Some(DiagnosticSeverity::ERROR)),
1481 (" = 2;", None),
1482 ],
1483 "Gigitnored buffers should still get in-buffer diagnostics",
1484 );
1485 });
1486 let other_buffer = project
1487 .update(cx, |project, cx| {
1488 project.open_buffer((other_worktree_id, ""), cx)
1489 })
1490 .await
1491 .unwrap();
1492 other_buffer.update(cx, |buffer, _| {
1493 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1494 assert_eq!(
1495 chunks
1496 .iter()
1497 .map(|(s, d)| (s.as_str(), *d))
1498 .collect::<Vec<_>>(),
1499 &[
1500 ("let b = ", None),
1501 ("c", Some(DiagnosticSeverity::ERROR)),
1502 (";", None),
1503 ],
1504 "Buffers from hidden projects should still get in-buffer diagnostics"
1505 );
1506 });
1507
1508 project.update(cx, |project, cx| {
1509 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1510 assert_eq!(
1511 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1512 vec![(
1513 ProjectPath {
1514 worktree_id: main_worktree_id,
1515 path: Arc::from(Path::new("b.rs")),
1516 },
1517 server_id,
1518 DiagnosticSummary {
1519 error_count: 1,
1520 warning_count: 0,
1521 }
1522 )]
1523 );
1524 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1525 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1526 });
1527}
1528
// Verifies the project event sequence around a language server's disk-based
// diagnostics cycle (server added → progress started → diagnostics updated →
// progress finished), and that re-publishing the same empty diagnostics does
// not emit a second update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The adapter below registers this token as its disk-based-diagnostics
    // progress token, so progress under it drives the DiskBasedDiagnostics*
    // events asserted further down.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    // The first event is the server being added for this worktree.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress under the disk-based token emits
    // DiskBasedDiagnosticsStarted (preceded by the inlay-hint refresh that
    // follows server startup).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic for a.rs (not yet open) surfaces a
    // DiagnosticsUpdated event for that path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress marks the disk-based diagnostics pass as finished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening a.rs afterwards shows the previously-published diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical (empty) publish is a no-op: no further events.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1664
// Verifies that restarting a language server while its disk-based
// diagnostics pass is still in progress doesn't leave the project stuck in a
// "diagnostics running" state: the old server's unfinished progress is
// discarded and only the replacement server's cycle is tracked.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The replacement server gets the next id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics —
    // the old server's dangling progress is not counted.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1751
1752#[gpui::test]
1753async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1754 init_test(cx);
1755
1756 let fs = FakeFs::new(cx.executor());
1757 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
1758
1759 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1760
1761 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1762 language_registry.add(rust_lang());
1763 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1764
1765 let (buffer, _) = project
1766 .update(cx, |project, cx| {
1767 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1768 })
1769 .await
1770 .unwrap();
1771
1772 // Publish diagnostics
1773 let fake_server = fake_servers.next().await.unwrap();
1774 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1775 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1776 version: None,
1777 diagnostics: vec![lsp::Diagnostic {
1778 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1779 severity: Some(lsp::DiagnosticSeverity::ERROR),
1780 message: "the message".to_string(),
1781 ..Default::default()
1782 }],
1783 });
1784
1785 cx.executor().run_until_parked();
1786 buffer.update(cx, |buffer, _| {
1787 assert_eq!(
1788 buffer
1789 .snapshot()
1790 .diagnostics_in_range::<_, usize>(0..1, false)
1791 .map(|entry| entry.diagnostic.message.clone())
1792 .collect::<Vec<_>>(),
1793 ["the message".to_string()]
1794 );
1795 });
1796 project.update(cx, |project, cx| {
1797 assert_eq!(
1798 project.diagnostic_summary(false, cx),
1799 DiagnosticSummary {
1800 error_count: 1,
1801 warning_count: 0,
1802 }
1803 );
1804 });
1805
1806 project.update(cx, |project, cx| {
1807 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1808 });
1809
1810 // The diagnostics are cleared.
1811 cx.executor().run_until_parked();
1812 buffer.update(cx, |buffer, _| {
1813 assert_eq!(
1814 buffer
1815 .snapshot()
1816 .diagnostics_in_range::<_, usize>(0..1, false)
1817 .map(|entry| entry.diagnostic.message.clone())
1818 .collect::<Vec<_>>(),
1819 Vec::<String>::new(),
1820 );
1821 });
1822 project.update(cx, |project, cx| {
1823 assert_eq!(
1824 project.diagnostic_summary(false, cx),
1825 DiagnosticSummary {
1826 error_count: 0,
1827 warning_count: 0,
1828 }
1829 );
1830 });
1831}
1832
1833#[gpui::test]
1834async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1835 init_test(cx);
1836
1837 let fs = FakeFs::new(cx.executor());
1838 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1839
1840 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1841 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1842
1843 language_registry.add(rust_lang());
1844 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1845
1846 let (buffer, _handle) = project
1847 .update(cx, |project, cx| {
1848 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1849 })
1850 .await
1851 .unwrap();
1852
1853 // Before restarting the server, report diagnostics with an unknown buffer version.
1854 let fake_server = fake_servers.next().await.unwrap();
1855 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1856 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1857 version: Some(10000),
1858 diagnostics: Vec::new(),
1859 });
1860 cx.executor().run_until_parked();
1861 project.update(cx, |project, cx| {
1862 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1863 });
1864
1865 let mut fake_server = fake_servers.next().await.unwrap();
1866 let notification = fake_server
1867 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1868 .await
1869 .text_document;
1870 assert_eq!(notification.version, 0);
1871}
1872
// Verifies that cancelling language-server work for a buffer sends a
// work-done-progress cancel notification only for progress the server
// marked as cancellable.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First, a non-cancellable progress: cancellation must skip this one.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // Second, a cancellable progress under the disk-based token: this is the
    // one the cancel request below should target.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token receives a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1937
// Verifies that toggling `enable_language_server` in the per-language
// settings starts/stops exactly the affected server: disabling Rust stops
// only the Rust server; re-enabling Rust while disabling JavaScript restarts
// the former and stops the latter.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // One fake server per language so we can observe them independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server receives Exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server starts and re-opens the Rust buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JavaScript server is shut down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2055
2056#[gpui::test(iterations = 3)]
2057async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
2058 init_test(cx);
2059
2060 let text = "
2061 fn a() { A }
2062 fn b() { BB }
2063 fn c() { CCC }
2064 "
2065 .unindent();
2066
2067 let fs = FakeFs::new(cx.executor());
2068 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2069
2070 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2071 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2072
2073 language_registry.add(rust_lang());
2074 let mut fake_servers = language_registry.register_fake_lsp(
2075 "Rust",
2076 FakeLspAdapter {
2077 disk_based_diagnostics_sources: vec!["disk".into()],
2078 ..Default::default()
2079 },
2080 );
2081
2082 let buffer = project
2083 .update(cx, |project, cx| {
2084 project.open_local_buffer(path!("/dir/a.rs"), cx)
2085 })
2086 .await
2087 .unwrap();
2088
2089 let _handle = project.update(cx, |project, cx| {
2090 project.register_buffer_with_language_servers(&buffer, cx)
2091 });
2092
2093 let mut fake_server = fake_servers.next().await.unwrap();
2094 let open_notification = fake_server
2095 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2096 .await;
2097
2098 // Edit the buffer, moving the content down
2099 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2100 let change_notification_1 = fake_server
2101 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2102 .await;
2103 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2104
2105 // Report some diagnostics for the initial version of the buffer
2106 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2107 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2108 version: Some(open_notification.text_document.version),
2109 diagnostics: vec![
2110 lsp::Diagnostic {
2111 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2112 severity: Some(DiagnosticSeverity::ERROR),
2113 message: "undefined variable 'A'".to_string(),
2114 source: Some("disk".to_string()),
2115 ..Default::default()
2116 },
2117 lsp::Diagnostic {
2118 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2119 severity: Some(DiagnosticSeverity::ERROR),
2120 message: "undefined variable 'BB'".to_string(),
2121 source: Some("disk".to_string()),
2122 ..Default::default()
2123 },
2124 lsp::Diagnostic {
2125 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2126 severity: Some(DiagnosticSeverity::ERROR),
2127 source: Some("disk".to_string()),
2128 message: "undefined variable 'CCC'".to_string(),
2129 ..Default::default()
2130 },
2131 ],
2132 });
2133
2134 // The diagnostics have moved down since they were created.
2135 cx.executor().run_until_parked();
2136 buffer.update(cx, |buffer, _| {
2137 assert_eq!(
2138 buffer
2139 .snapshot()
2140 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2141 .collect::<Vec<_>>(),
2142 &[
2143 DiagnosticEntry {
2144 range: Point::new(3, 9)..Point::new(3, 11),
2145 diagnostic: Diagnostic {
2146 source: Some("disk".into()),
2147 severity: DiagnosticSeverity::ERROR,
2148 message: "undefined variable 'BB'".to_string(),
2149 is_disk_based: true,
2150 group_id: 1,
2151 is_primary: true,
2152 ..Default::default()
2153 },
2154 },
2155 DiagnosticEntry {
2156 range: Point::new(4, 9)..Point::new(4, 12),
2157 diagnostic: Diagnostic {
2158 source: Some("disk".into()),
2159 severity: DiagnosticSeverity::ERROR,
2160 message: "undefined variable 'CCC'".to_string(),
2161 is_disk_based: true,
2162 group_id: 2,
2163 is_primary: true,
2164 ..Default::default()
2165 }
2166 }
2167 ]
2168 );
2169 assert_eq!(
2170 chunks_with_diagnostics(buffer, 0..buffer.len()),
2171 [
2172 ("\n\nfn a() { ".to_string(), None),
2173 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2174 (" }\nfn b() { ".to_string(), None),
2175 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2176 (" }\nfn c() { ".to_string(), None),
2177 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2178 (" }\n".to_string(), None),
2179 ]
2180 );
2181 assert_eq!(
2182 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2183 [
2184 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2185 (" }\nfn c() { ".to_string(), None),
2186 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2187 ]
2188 );
2189 });
2190
2191 // Ensure overlapping diagnostics are highlighted correctly.
2192 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2193 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2194 version: Some(open_notification.text_document.version),
2195 diagnostics: vec![
2196 lsp::Diagnostic {
2197 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2198 severity: Some(DiagnosticSeverity::ERROR),
2199 message: "undefined variable 'A'".to_string(),
2200 source: Some("disk".to_string()),
2201 ..Default::default()
2202 },
2203 lsp::Diagnostic {
2204 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2205 severity: Some(DiagnosticSeverity::WARNING),
2206 message: "unreachable statement".to_string(),
2207 source: Some("disk".to_string()),
2208 ..Default::default()
2209 },
2210 ],
2211 });
2212
2213 cx.executor().run_until_parked();
2214 buffer.update(cx, |buffer, _| {
2215 assert_eq!(
2216 buffer
2217 .snapshot()
2218 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2219 .collect::<Vec<_>>(),
2220 &[
2221 DiagnosticEntry {
2222 range: Point::new(2, 9)..Point::new(2, 12),
2223 diagnostic: Diagnostic {
2224 source: Some("disk".into()),
2225 severity: DiagnosticSeverity::WARNING,
2226 message: "unreachable statement".to_string(),
2227 is_disk_based: true,
2228 group_id: 4,
2229 is_primary: true,
2230 ..Default::default()
2231 }
2232 },
2233 DiagnosticEntry {
2234 range: Point::new(2, 9)..Point::new(2, 10),
2235 diagnostic: Diagnostic {
2236 source: Some("disk".into()),
2237 severity: DiagnosticSeverity::ERROR,
2238 message: "undefined variable 'A'".to_string(),
2239 is_disk_based: true,
2240 group_id: 3,
2241 is_primary: true,
2242 ..Default::default()
2243 },
2244 }
2245 ]
2246 );
2247 assert_eq!(
2248 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2249 [
2250 ("fn a() { ".to_string(), None),
2251 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2252 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2253 ("\n".to_string(), None),
2254 ]
2255 );
2256 assert_eq!(
2257 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2258 [
2259 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2260 ("\n".to_string(), None),
2261 ]
2262 );
2263 });
2264
2265 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2266 // changes since the last save.
2267 buffer.update(cx, |buffer, cx| {
2268 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2269 buffer.edit(
2270 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2271 None,
2272 cx,
2273 );
2274 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2275 });
2276 let change_notification_2 = fake_server
2277 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2278 .await;
2279 assert!(
2280 change_notification_2.text_document.version > change_notification_1.text_document.version
2281 );
2282
2283 // Handle out-of-order diagnostics
2284 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2285 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2286 version: Some(change_notification_2.text_document.version),
2287 diagnostics: vec![
2288 lsp::Diagnostic {
2289 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2290 severity: Some(DiagnosticSeverity::ERROR),
2291 message: "undefined variable 'BB'".to_string(),
2292 source: Some("disk".to_string()),
2293 ..Default::default()
2294 },
2295 lsp::Diagnostic {
2296 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2297 severity: Some(DiagnosticSeverity::WARNING),
2298 message: "undefined variable 'A'".to_string(),
2299 source: Some("disk".to_string()),
2300 ..Default::default()
2301 },
2302 ],
2303 });
2304
2305 cx.executor().run_until_parked();
2306 buffer.update(cx, |buffer, _| {
2307 assert_eq!(
2308 buffer
2309 .snapshot()
2310 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2311 .collect::<Vec<_>>(),
2312 &[
2313 DiagnosticEntry {
2314 range: Point::new(2, 21)..Point::new(2, 22),
2315 diagnostic: Diagnostic {
2316 source: Some("disk".into()),
2317 severity: DiagnosticSeverity::WARNING,
2318 message: "undefined variable 'A'".to_string(),
2319 is_disk_based: true,
2320 group_id: 6,
2321 is_primary: true,
2322 ..Default::default()
2323 }
2324 },
2325 DiagnosticEntry {
2326 range: Point::new(3, 9)..Point::new(3, 14),
2327 diagnostic: Diagnostic {
2328 source: Some("disk".into()),
2329 severity: DiagnosticSeverity::ERROR,
2330 message: "undefined variable 'BB'".to_string(),
2331 is_disk_based: true,
2332 group_id: 5,
2333 is_primary: true,
2334 ..Default::default()
2335 },
2336 }
2337 ]
2338 );
2339 });
2340}
2341
2342#[gpui::test]
2343async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2344 init_test(cx);
2345
2346 let text = concat!(
2347 "let one = ;\n", //
2348 "let two = \n",
2349 "let three = 3;\n",
2350 );
2351
2352 let fs = FakeFs::new(cx.executor());
2353 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2354
2355 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2356 let buffer = project
2357 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2358 .await
2359 .unwrap();
2360
2361 project.update(cx, |project, cx| {
2362 project.lsp_store.update(cx, |lsp_store, cx| {
2363 lsp_store
2364 .update_diagnostic_entries(
2365 LanguageServerId(0),
2366 PathBuf::from("/dir/a.rs"),
2367 None,
2368 vec![
2369 DiagnosticEntry {
2370 range: Unclipped(PointUtf16::new(0, 10))
2371 ..Unclipped(PointUtf16::new(0, 10)),
2372 diagnostic: Diagnostic {
2373 severity: DiagnosticSeverity::ERROR,
2374 message: "syntax error 1".to_string(),
2375 ..Default::default()
2376 },
2377 },
2378 DiagnosticEntry {
2379 range: Unclipped(PointUtf16::new(1, 10))
2380 ..Unclipped(PointUtf16::new(1, 10)),
2381 diagnostic: Diagnostic {
2382 severity: DiagnosticSeverity::ERROR,
2383 message: "syntax error 2".to_string(),
2384 ..Default::default()
2385 },
2386 },
2387 ],
2388 cx,
2389 )
2390 .unwrap();
2391 })
2392 });
2393
2394 // An empty range is extended forward to include the following character.
2395 // At the end of a line, an empty range is extended backward to include
2396 // the preceding character.
2397 buffer.update(cx, |buffer, _| {
2398 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2399 assert_eq!(
2400 chunks
2401 .iter()
2402 .map(|(s, d)| (s.as_str(), *d))
2403 .collect::<Vec<_>>(),
2404 &[
2405 ("let one = ", None),
2406 (";", Some(DiagnosticSeverity::ERROR)),
2407 ("\nlet two =", None),
2408 (" ", Some(DiagnosticSeverity::ERROR)),
2409 ("\nlet three = 3;\n", None)
2410 ]
2411 );
2412 });
2413}
2414
2415#[gpui::test]
2416async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2417 init_test(cx);
2418
2419 let fs = FakeFs::new(cx.executor());
2420 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2421 .await;
2422
2423 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2424 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2425
2426 lsp_store.update(cx, |lsp_store, cx| {
2427 lsp_store
2428 .update_diagnostic_entries(
2429 LanguageServerId(0),
2430 Path::new("/dir/a.rs").to_owned(),
2431 None,
2432 vec![DiagnosticEntry {
2433 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2434 diagnostic: Diagnostic {
2435 severity: DiagnosticSeverity::ERROR,
2436 is_primary: true,
2437 message: "syntax error a1".to_string(),
2438 ..Default::default()
2439 },
2440 }],
2441 cx,
2442 )
2443 .unwrap();
2444 lsp_store
2445 .update_diagnostic_entries(
2446 LanguageServerId(1),
2447 Path::new("/dir/a.rs").to_owned(),
2448 None,
2449 vec![DiagnosticEntry {
2450 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2451 diagnostic: Diagnostic {
2452 severity: DiagnosticSeverity::ERROR,
2453 is_primary: true,
2454 message: "syntax error b1".to_string(),
2455 ..Default::default()
2456 },
2457 }],
2458 cx,
2459 )
2460 .unwrap();
2461
2462 assert_eq!(
2463 lsp_store.diagnostic_summary(false, cx),
2464 DiagnosticSummary {
2465 error_count: 2,
2466 warning_count: 0,
2467 }
2468 );
2469 });
2470}
2471
// Verifies that LSP edits computed against an older document version
// (`lsp_document_version`) are rebased onto the buffer's current contents
// after the user has kept editing.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    // Capture the version the server "saw"; the TextEdits below are
    // interpreted relative to this snapshot of the document.
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // The returned edits must land in the right places even though the
    // buffer has drifted from the version the server edited: the user's
    // comments survive and the server's changes still apply.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2626
// Verifies that a huge, mostly-redundant diff from the server (rewrite the
// whole file to change one import) is minimized down to the actual changes
// before being applied to the buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four server edits above collapse to just the two real changes:
        // rewrite the import and delete the now-duplicated second use line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2737
2738#[gpui::test]
2739async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
2740 cx: &mut gpui::TestAppContext,
2741) {
2742 init_test(cx);
2743
2744 let text = "Path()";
2745
2746 let fs = FakeFs::new(cx.executor());
2747 fs.insert_tree(
2748 path!("/dir"),
2749 json!({
2750 "a.rs": text
2751 }),
2752 )
2753 .await;
2754
2755 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2756 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2757 let buffer = project
2758 .update(cx, |project, cx| {
2759 project.open_local_buffer(path!("/dir/a.rs"), cx)
2760 })
2761 .await
2762 .unwrap();
2763
2764 // Simulate the language server sending us a pair of edits at the same location,
2765 // with an insertion following a replacement (which violates the LSP spec).
2766 let edits = lsp_store
2767 .update(cx, |lsp_store, cx| {
2768 lsp_store.as_local_mut().unwrap().edits_from_lsp(
2769 &buffer,
2770 [
2771 lsp::TextEdit {
2772 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
2773 new_text: "Path".into(),
2774 },
2775 lsp::TextEdit {
2776 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2777 new_text: "from path import Path\n\n\n".into(),
2778 },
2779 ],
2780 LanguageServerId(0),
2781 None,
2782 cx,
2783 )
2784 })
2785 .await
2786 .unwrap();
2787
2788 buffer.update(cx, |buffer, cx| {
2789 buffer.edit(edits, None, cx);
2790 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
2791 });
2792}
2793
// Verifies that `edits_from_lsp` tolerates malformed server edits: ranges
// listed out of order, inverted (end before start), and extending past the
// end of the document — they are normalized and clamped before application.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start column 8 comes after end column 4.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) lies far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // After normalization, only the two real changes remain.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2900
2901fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2902 buffer: &Buffer,
2903 range: Range<T>,
2904) -> Vec<(String, Option<DiagnosticSeverity>)> {
2905 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2906 for chunk in buffer.snapshot().chunks(range, true) {
2907 if chunks.last().map_or(false, |prev_chunk| {
2908 prev_chunk.1 == chunk.diagnostic_severity
2909 }) {
2910 chunks.last_mut().unwrap().0.push_str(chunk.text);
2911 } else {
2912 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2913 }
2914 }
2915 chunks
2916}
2917
// Verifies go-to-definition into a file outside the project: the target file
// is opened via an invisible worktree that is released once the last
// reference to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is on disk but outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Point the definition at a.rs, outside the project's worktree.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an invisible (false) worktree to host the target.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3015
// Verifies that when a completion item carries a `text_edit`, its range and
// new text take precedence over both `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item's text_edit replaces the last 3 characters ("fqn").
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // Neither "labelText" nor "insertText" is used.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3098
// Verifies handling of CompletionList `itemDefaults.edit_range`: when an
// item has no `text_edit` of its own, the list-level default range is
// combined with `insert_text` (test 1) or, failing that, the `label`
// (test 2).
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covering the last 3 characters ("fqn").
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text wins over label; range comes from the list default.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no insert_text either, the label is used as the new text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3234
// Verifies completion fallbacks when neither the item nor the list defaults
// provide an edit range: the replace range covers the partial word adjacent
// to the cursor ("fqn" in test 1, "cmp" in test 2), and the new text comes
// from insert_text (test 1) or the label (test 2).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // Range covers "fqn", the word fragment before the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just inside the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // Range covers "cmp", not the slash or surrounding quotes.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3340
// Verifies that carriage returns ("\r" and "\r\n") in a server-provided
// insert_text are normalized to plain "\n" in the resulting completion.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // insert_text mixes a bare "\r" and a Windows-style "\r\n".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both line-ending styles collapse to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3408
// End-to-end test of a code action that resolves to a command rather than
// edits: the action is resolved, its command is executed on the fake server,
// and the edits the server sends back via `workspace/applyEdit` end up in the
// project transaction returned by `apply_code_action`.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The fake server advertises code-action resolution plus one executable command.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action — the one carrying command data.
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // The server-initiated edit inserts "X" at the buffer start.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3550
3551#[gpui::test(iterations = 10)]
3552async fn test_save_file(cx: &mut gpui::TestAppContext) {
3553 init_test(cx);
3554
3555 let fs = FakeFs::new(cx.executor());
3556 fs.insert_tree(
3557 path!("/dir"),
3558 json!({
3559 "file1": "the old contents",
3560 }),
3561 )
3562 .await;
3563
3564 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3565 let buffer = project
3566 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3567 .await
3568 .unwrap();
3569 buffer.update(cx, |buffer, cx| {
3570 assert_eq!(buffer.text(), "the old contents");
3571 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3572 });
3573
3574 project
3575 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3576 .await
3577 .unwrap();
3578
3579 let new_text = fs
3580 .load(Path::new(path!("/dir/file1")))
3581 .await
3582 .unwrap()
3583 .replace("\r\n", "\n");
3584 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3585}
3586
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Regression test: saving a previously-untitled buffer under a `*.rs`
    // path must start the Rust language server and register the buffer
    // with it.
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an in-memory buffer with no file: no language server should be
    // associated with it yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as a Rust file inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: Arc::from("file.rs".as_ref()),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer now reports an attached language server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
3666
3667#[gpui::test(iterations = 30)]
3668async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3669 init_test(cx);
3670
3671 let fs = FakeFs::new(cx.executor().clone());
3672 fs.insert_tree(
3673 path!("/dir"),
3674 json!({
3675 "file1": "the original contents",
3676 }),
3677 )
3678 .await;
3679
3680 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3681 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3682 let buffer = project
3683 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3684 .await
3685 .unwrap();
3686
3687 // Simulate buffer diffs being slow, so that they don't complete before
3688 // the next file change occurs.
3689 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3690
3691 // Change the buffer's file on disk, and then wait for the file change
3692 // to be detected by the worktree, so that the buffer starts reloading.
3693 fs.save(
3694 path!("/dir/file1").as_ref(),
3695 &"the first contents".into(),
3696 Default::default(),
3697 )
3698 .await
3699 .unwrap();
3700 worktree.next_event(cx).await;
3701
3702 // Change the buffer's file again. Depending on the random seed, the
3703 // previous file change may still be in progress.
3704 fs.save(
3705 path!("/dir/file1").as_ref(),
3706 &"the second contents".into(),
3707 Default::default(),
3708 )
3709 .await
3710 .unwrap();
3711 worktree.next_event(cx).await;
3712
3713 cx.executor().run_until_parked();
3714 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3715 buffer.read_with(cx, |buffer, _| {
3716 assert_eq!(buffer.text(), on_disk_text);
3717 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3718 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3719 });
3720}
3721
3722#[gpui::test(iterations = 30)]
3723async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3724 init_test(cx);
3725
3726 let fs = FakeFs::new(cx.executor().clone());
3727 fs.insert_tree(
3728 path!("/dir"),
3729 json!({
3730 "file1": "the original contents",
3731 }),
3732 )
3733 .await;
3734
3735 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3736 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3737 let buffer = project
3738 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3739 .await
3740 .unwrap();
3741
3742 // Simulate buffer diffs being slow, so that they don't complete before
3743 // the next file change occurs.
3744 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3745
3746 // Change the buffer's file on disk, and then wait for the file change
3747 // to be detected by the worktree, so that the buffer starts reloading.
3748 fs.save(
3749 path!("/dir/file1").as_ref(),
3750 &"the first contents".into(),
3751 Default::default(),
3752 )
3753 .await
3754 .unwrap();
3755 worktree.next_event(cx).await;
3756
3757 cx.executor()
3758 .spawn(cx.executor().simulate_random_delay())
3759 .await;
3760
3761 // Perform a noop edit, causing the buffer's version to increase.
3762 buffer.update(cx, |buffer, cx| {
3763 buffer.edit([(0..0, " ")], None, cx);
3764 buffer.undo(cx);
3765 });
3766
3767 cx.executor().run_until_parked();
3768 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3769 buffer.read_with(cx, |buffer, _| {
3770 let buffer_text = buffer.text();
3771 if buffer_text == on_disk_text {
3772 assert!(
3773 !buffer.is_dirty() && !buffer.has_conflict(),
3774 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3775 );
3776 }
3777 // If the file change occurred while the buffer was processing the first
3778 // change, the buffer will be in a conflicting state.
3779 else {
3780 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3781 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3782 }
3783 });
3784}
3785
3786#[gpui::test]
3787async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3788 init_test(cx);
3789
3790 let fs = FakeFs::new(cx.executor());
3791 fs.insert_tree(
3792 path!("/dir"),
3793 json!({
3794 "file1": "the old contents",
3795 }),
3796 )
3797 .await;
3798
3799 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3800 let buffer = project
3801 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3802 .await
3803 .unwrap();
3804 buffer.update(cx, |buffer, cx| {
3805 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3806 });
3807
3808 project
3809 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3810 .await
3811 .unwrap();
3812
3813 let new_text = fs
3814 .load(Path::new(path!("/dir/file1")))
3815 .await
3816 .unwrap()
3817 .replace("\r\n", "\n");
3818 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3819}
3820
3821#[gpui::test]
3822async fn test_save_as(cx: &mut gpui::TestAppContext) {
3823 init_test(cx);
3824
3825 let fs = FakeFs::new(cx.executor());
3826 fs.insert_tree("/dir", json!({})).await;
3827
3828 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3829
3830 let languages = project.update(cx, |project, _| project.languages().clone());
3831 languages.add(rust_lang());
3832
3833 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3834 buffer.update(cx, |buffer, cx| {
3835 buffer.edit([(0..0, "abc")], None, cx);
3836 assert!(buffer.is_dirty());
3837 assert!(!buffer.has_conflict());
3838 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3839 });
3840 project
3841 .update(cx, |project, cx| {
3842 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3843 let path = ProjectPath {
3844 worktree_id,
3845 path: Arc::from(Path::new("file1.rs")),
3846 };
3847 project.save_buffer_as(buffer.clone(), path, cx)
3848 })
3849 .await
3850 .unwrap();
3851 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3852
3853 cx.executor().run_until_parked();
3854 buffer.update(cx, |buffer, cx| {
3855 assert_eq!(
3856 buffer.file().unwrap().full_path(cx),
3857 Path::new("dir/file1.rs")
3858 );
3859 assert!(!buffer.is_dirty());
3860 assert!(!buffer.has_conflict());
3861 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3862 });
3863
3864 let opened_buffer = project
3865 .update(cx, |project, cx| {
3866 project.open_local_buffer("/dir/file1.rs", cx)
3867 })
3868 .await
3869 .unwrap();
3870 assert_eq!(opened_buffer, buffer);
3871}
3872
// Verifies that worktree entry ids and open-buffer file paths track renames
// and deletions on the real filesystem, and that a remote (replicated) copy
// of the worktree converges to the same set of paths after applying the
// observed update stream.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: resolve a worktree-relative path to its stable entry id.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture the local worktree's update stream so it can be replayed on
    // the remote copy below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the new on-disk layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids are preserved across renames and moves.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but reports DiskState::Deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
4038
4039#[gpui::test(iterations = 10)]
4040async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4041 init_test(cx);
4042
4043 let fs = FakeFs::new(cx.executor());
4044 fs.insert_tree(
4045 path!("/dir"),
4046 json!({
4047 "a": {
4048 "file1": "",
4049 }
4050 }),
4051 )
4052 .await;
4053
4054 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4055 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4056 let tree_id = tree.update(cx, |tree, _| tree.id());
4057
4058 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4059 project.update(cx, |project, cx| {
4060 let tree = project.worktrees(cx).next().unwrap();
4061 tree.read(cx)
4062 .entry_for_path(path)
4063 .unwrap_or_else(|| panic!("no entry for path {}", path))
4064 .id
4065 })
4066 };
4067
4068 let dir_id = id_for_path("a", cx);
4069 let file_id = id_for_path("a/file1", cx);
4070 let buffer = project
4071 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
4072 .await
4073 .unwrap();
4074 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4075
4076 project
4077 .update(cx, |project, cx| {
4078 project.rename_entry(dir_id, Path::new("b"), cx)
4079 })
4080 .unwrap()
4081 .await
4082 .to_included()
4083 .unwrap();
4084 cx.executor().run_until_parked();
4085
4086 assert_eq!(id_for_path("b", cx), dir_id);
4087 assert_eq!(id_for_path("b/file1", cx), file_id);
4088 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4089}
4090
4091#[gpui::test]
4092async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4093 init_test(cx);
4094
4095 let fs = FakeFs::new(cx.executor());
4096 fs.insert_tree(
4097 "/dir",
4098 json!({
4099 "a.txt": "a-contents",
4100 "b.txt": "b-contents",
4101 }),
4102 )
4103 .await;
4104
4105 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4106
4107 // Spawn multiple tasks to open paths, repeating some paths.
4108 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4109 (
4110 p.open_local_buffer("/dir/a.txt", cx),
4111 p.open_local_buffer("/dir/b.txt", cx),
4112 p.open_local_buffer("/dir/a.txt", cx),
4113 )
4114 });
4115
4116 let buffer_a_1 = buffer_a_1.await.unwrap();
4117 let buffer_a_2 = buffer_a_2.await.unwrap();
4118 let buffer_b = buffer_b.await.unwrap();
4119 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4120 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4121
4122 // There is only one buffer per path.
4123 let buffer_a_id = buffer_a_1.entity_id();
4124 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4125
4126 // Open the same path again while it is still open.
4127 drop(buffer_a_1);
4128 let buffer_a_3 = project
4129 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4130 .await
4131 .unwrap();
4132
4133 // There's still only one buffer per path.
4134 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4135}
4136
4137#[gpui::test]
4138async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4139 init_test(cx);
4140
4141 let fs = FakeFs::new(cx.executor());
4142 fs.insert_tree(
4143 path!("/dir"),
4144 json!({
4145 "file1": "abc",
4146 "file2": "def",
4147 "file3": "ghi",
4148 }),
4149 )
4150 .await;
4151
4152 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4153
4154 let buffer1 = project
4155 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4156 .await
4157 .unwrap();
4158 let events = Arc::new(Mutex::new(Vec::new()));
4159
4160 // initially, the buffer isn't dirty.
4161 buffer1.update(cx, |buffer, cx| {
4162 cx.subscribe(&buffer1, {
4163 let events = events.clone();
4164 move |_, _, event, _| match event {
4165 BufferEvent::Operation { .. } => {}
4166 _ => events.lock().push(event.clone()),
4167 }
4168 })
4169 .detach();
4170
4171 assert!(!buffer.is_dirty());
4172 assert!(events.lock().is_empty());
4173
4174 buffer.edit([(1..2, "")], None, cx);
4175 });
4176
4177 // after the first edit, the buffer is dirty, and emits a dirtied event.
4178 buffer1.update(cx, |buffer, cx| {
4179 assert!(buffer.text() == "ac");
4180 assert!(buffer.is_dirty());
4181 assert_eq!(
4182 *events.lock(),
4183 &[
4184 language::BufferEvent::Edited,
4185 language::BufferEvent::DirtyChanged
4186 ]
4187 );
4188 events.lock().clear();
4189 buffer.did_save(
4190 buffer.version(),
4191 buffer.file().unwrap().disk_state().mtime(),
4192 cx,
4193 );
4194 });
4195
4196 // after saving, the buffer is not dirty, and emits a saved event.
4197 buffer1.update(cx, |buffer, cx| {
4198 assert!(!buffer.is_dirty());
4199 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4200 events.lock().clear();
4201
4202 buffer.edit([(1..1, "B")], None, cx);
4203 buffer.edit([(2..2, "D")], None, cx);
4204 });
4205
4206 // after editing again, the buffer is dirty, and emits another dirty event.
4207 buffer1.update(cx, |buffer, cx| {
4208 assert!(buffer.text() == "aBDc");
4209 assert!(buffer.is_dirty());
4210 assert_eq!(
4211 *events.lock(),
4212 &[
4213 language::BufferEvent::Edited,
4214 language::BufferEvent::DirtyChanged,
4215 language::BufferEvent::Edited,
4216 ],
4217 );
4218 events.lock().clear();
4219
4220 // After restoring the buffer to its previously-saved state,
4221 // the buffer is not considered dirty anymore.
4222 buffer.edit([(1..3, "")], None, cx);
4223 assert!(buffer.text() == "ac");
4224 assert!(!buffer.is_dirty());
4225 });
4226
4227 assert_eq!(
4228 *events.lock(),
4229 &[
4230 language::BufferEvent::Edited,
4231 language::BufferEvent::DirtyChanged
4232 ]
4233 );
4234
4235 // When a file is deleted, it is not considered dirty.
4236 let events = Arc::new(Mutex::new(Vec::new()));
4237 let buffer2 = project
4238 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4239 .await
4240 .unwrap();
4241 buffer2.update(cx, |_, cx| {
4242 cx.subscribe(&buffer2, {
4243 let events = events.clone();
4244 move |_, _, event, _| match event {
4245 BufferEvent::Operation { .. } => {}
4246 _ => events.lock().push(event.clone()),
4247 }
4248 })
4249 .detach();
4250 });
4251
4252 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4253 .await
4254 .unwrap();
4255 cx.executor().run_until_parked();
4256 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4257 assert_eq!(
4258 mem::take(&mut *events.lock()),
4259 &[language::BufferEvent::FileHandleChanged]
4260 );
4261
4262 // Buffer becomes dirty when edited.
4263 buffer2.update(cx, |buffer, cx| {
4264 buffer.edit([(2..3, "")], None, cx);
4265 assert_eq!(buffer.is_dirty(), true);
4266 });
4267 assert_eq!(
4268 mem::take(&mut *events.lock()),
4269 &[
4270 language::BufferEvent::Edited,
4271 language::BufferEvent::DirtyChanged
4272 ]
4273 );
4274
4275 // Buffer becomes clean again when all of its content is removed, because
4276 // the file was deleted.
4277 buffer2.update(cx, |buffer, cx| {
4278 buffer.edit([(0..2, "")], None, cx);
4279 assert_eq!(buffer.is_empty(), true);
4280 assert_eq!(buffer.is_dirty(), false);
4281 });
4282 assert_eq!(
4283 *events.lock(),
4284 &[
4285 language::BufferEvent::Edited,
4286 language::BufferEvent::DirtyChanged
4287 ]
4288 );
4289
4290 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4291 let events = Arc::new(Mutex::new(Vec::new()));
4292 let buffer3 = project
4293 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4294 .await
4295 .unwrap();
4296 buffer3.update(cx, |_, cx| {
4297 cx.subscribe(&buffer3, {
4298 let events = events.clone();
4299 move |_, _, event, _| match event {
4300 BufferEvent::Operation { .. } => {}
4301 _ => events.lock().push(event.clone()),
4302 }
4303 })
4304 .detach();
4305 });
4306
4307 buffer3.update(cx, |buffer, cx| {
4308 buffer.edit([(0..0, "x")], None, cx);
4309 });
4310 events.lock().clear();
4311 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4312 .await
4313 .unwrap();
4314 cx.executor().run_until_parked();
4315 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4316 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4317}
4318
// Verifies reload-on-disk-change behavior: a clean buffer picks up external
// edits (repositioning anchors via the old/new diff), while a dirty buffer
// keeps its contents and is flagged as conflicted instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Place an anchor at each of the marked offsets.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diffed edits to the expected offsets.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4401
// Verifies line-ending handling for buffers: endings are detected from the
// on-disk bytes, buffer text is normalized to "\n" in memory, changes to the
// file's endings on disk are picked up, and saving re-applies the buffer's
// line-ending style ("\r\n" for Windows) to the written file.
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // file1 uses Unix ("\n") endings on disk; file2 uses Windows ("\r\n").
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();

    // Each buffer reports the ending style of its file; note buffer2's text is
    // normalized to "\n" in memory even though the file uses "\r\n".
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        path!("/dir/file1").as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
4463
// Verifies that LSP diagnostics published with `related_information` are
// grouped: each primary diagnostic and the hint diagnostics that point back at
// it share a `group_id`, the primary is flagged `is_primary`, and
// `diagnostic_group` returns every member of a group ordered by position.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload with two logical groups:
    // - "error 1" (warning) plus one hint that references it, and
    // - "error 2" (error) plus two hints that reference it.
    // Hints are tied to their primary via matching message text in
    // `related_information`, mirroring how rust-analyzer reports them.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics come back position-ordered; the "error 2" group got
    // group_id 0 and the "error 1" group got group_id 1, with exactly one
    // `is_primary` entry per group.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 contains "error 2" and both of its hints, ordered by position
    // (hints on line 1 precede the primary on line 2).
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 contains "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4706
// Verifies the file-operation protocol around renaming a project entry: when a
// server registers willRename/didRename filters that match the path, renaming
// an entry sends `workspace/willRenameFiles` (whose returned WorkspaceEdit the
// client accepts) followed by a `workspace/didRenameFiles` notification.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters that make the server interested in renames of *.rs files and of
    // any folder; used for both willRename and didRename registration below.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the returned future resolves only after the LSP
    // round-trips below complete.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server will return from willRenameFiles; its content is
    // arbitrary — the test only checks it is passed through unchanged.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit handed back by the willRenameFiles handler so we can
    // assert it was actually requested.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // willRenameFiles is sent before the rename happens, with
                    // the old and prospective new URIs.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, didRenameFiles must be delivered with the
    // same old/new URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4835
// End-to-end symbol rename through a fake LSP server: `prepare_rename` maps
// the server's PrepareRenameResponse range into buffer offsets, and
// `perform_rename` applies a multi-file WorkspaceEdit, returning a transaction
// keyed by the affected buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepare_rename at offset 7 (inside "ONE"); the server answers with the
    // range of the symbol, which the client converts to offsets 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename: the server returns edits spanning both files — one edit
    // in the definition file and two in the file that references the symbol.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its undo transaction; both
    // buffers must reflect the applied rename.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4975
// Verifies project-wide text search: results are keyed by worktree-relative
// path with byte ranges of each match, and searches see unsaved edits in open
// buffers (buffer_4 is edited but never saved before the second search).
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive search for "TWO" over on-disk content.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit an open buffer (without saving) so it now contains "TWO" twice.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same search now also finds the unsaved in-buffer matches in four.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40]),
            (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5052
// Verifies the `files_to_include` PathMatcher argument of SearchQuery::text:
// non-matching inclusions yield no results, and matching inclusions restrict
// results to exactly the matched file types (extra non-matching globs are
// harmless).
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion glob that matches nothing.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Inclusion restricted to Rust sources.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A matching glob plus a non-matching one: the dead glob changes nothing.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching globs are unioned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.ts").to_string(), vec![14..18]),
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5172
// Verifies the `files_to_exclude` PathMatcher argument of SearchQuery::text:
// non-matching exclusions leave all results, matching exclusions remove their
// files, and excluding every file type yields no results.
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Exclusion glob that matches nothing: everything is returned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Excluding Rust sources leaves only the TypeScript matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A matching exclusion plus a non-matching one: the dead glob is ignored.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding every present file type yields an empty result set.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5292
// Verifies the interaction between `files_to_include` and `files_to_exclude`
// when both are supplied: exclusions always win over inclusions on overlap,
// and disjoint inclusion/exclusion sets behave as inclusion alone.
#[gpui::test]
async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Identical non-matching globs on both sides: nothing is returned.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both no exclusions and inclusions match, exclusions should win and return nothing"
    );

    // Identical matching globs on both sides: the exclusion overrides the
    // inclusion, so nothing is returned.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
    );

    // Adding a dead glob to both sides does not change the outcome.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Non-matching inclusions and exclusions should not change that."
    );

    // Disjoint sets: include *.ts, exclude *.rs — only TypeScript survives.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
    );
}
5403
// Verifies inclusion filters across multiple worktrees. When the boolean flag
// after the path matchers is true, inclusion globs are matched against paths
// prefixed with the worktree root name ("worktree-a/*.rs"); when false, globs
// like "*.ts" apply uniformly to files in every worktree.
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // Two worktrees with identically-named files so filtering by worktree is
    // observable in the results.
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // Worktree-qualified inclusion selects files from only that worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An unqualified glob (flag false) matches across both worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
5501
// Verifies the include-ignored flag (4th argument of SearchQuery::text):
// gitignored directories ("target", "node_modules") are skipped by default,
// fully searched when the flag is set, and still subject to the
// inclusion/exclusion path matchers.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    // "target" and "node_modules" are gitignored; only the root package.json
    // is tracked content containing the query.
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: ignored directories are not scanned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project with include-ignored set: every file is searched.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include-ignored combined with an inclusion (prettier dir only) and an
    // exclusion (*.ts) narrows the ignored-file results accordingly.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5624
// Verifies searching for non-ASCII (Cyrillic) text. Match ranges are byte
// offsets (each Cyrillic char is 2 bytes in UTF-8, so "привет" spans 12
// bytes). Also pins an implementation detail: a case-insensitive query over
// non-ASCII text is compiled to a SearchQuery::Regex rather than plain text,
// while the case-sensitive form stays SearchQuery::Text.
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Case-sensitive: only the lowercase occurrences match.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![17..29]),
            (separator!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive: matches both ПРИВЕТ and привет, and is internally a
    // regex query for non-ASCII input.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (separator!("dir/two.rs").to_string(), vec![3..15]),
            (separator!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // "привет." matches literally — the "." must be the actual period in
    // two.rs (13 bytes total), not a regex wildcard hit in the other files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
5708
// Verifies path validation in Project::create_entry and open_buffer: a name
// like "b.." (trailing dots, not a parent reference) is allowed, while any
// path containing a ".." component — whether escaping the worktree or not —
// is rejected for both entry creation and buffer opening.
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    // The worktree root is "three"; "c.rs" lives outside it in the parent dir.
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a legal file name (dots only, no parent-dir component).
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only "b.." was created; the rejected paths left no trace on disk.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
5778
5779#[gpui::test]
5780async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
5781 init_test(cx);
5782
5783 let fs = FakeFs::new(cx.executor());
5784 fs.insert_tree(
5785 path!("/dir"),
5786 json!({
5787 "a.tsx": "a",
5788 }),
5789 )
5790 .await;
5791
5792 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5793
5794 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5795 language_registry.add(tsx_lang());
5796 let language_server_names = [
5797 "TypeScriptServer",
5798 "TailwindServer",
5799 "ESLintServer",
5800 "NoHoverCapabilitiesServer",
5801 ];
5802 let mut language_servers = [
5803 language_registry.register_fake_lsp(
5804 "tsx",
5805 FakeLspAdapter {
5806 name: language_server_names[0],
5807 capabilities: lsp::ServerCapabilities {
5808 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5809 ..lsp::ServerCapabilities::default()
5810 },
5811 ..FakeLspAdapter::default()
5812 },
5813 ),
5814 language_registry.register_fake_lsp(
5815 "tsx",
5816 FakeLspAdapter {
5817 name: language_server_names[1],
5818 capabilities: lsp::ServerCapabilities {
5819 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5820 ..lsp::ServerCapabilities::default()
5821 },
5822 ..FakeLspAdapter::default()
5823 },
5824 ),
5825 language_registry.register_fake_lsp(
5826 "tsx",
5827 FakeLspAdapter {
5828 name: language_server_names[2],
5829 capabilities: lsp::ServerCapabilities {
5830 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5831 ..lsp::ServerCapabilities::default()
5832 },
5833 ..FakeLspAdapter::default()
5834 },
5835 ),
5836 language_registry.register_fake_lsp(
5837 "tsx",
5838 FakeLspAdapter {
5839 name: language_server_names[3],
5840 capabilities: lsp::ServerCapabilities {
5841 hover_provider: None,
5842 ..lsp::ServerCapabilities::default()
5843 },
5844 ..FakeLspAdapter::default()
5845 },
5846 ),
5847 ];
5848
5849 let (buffer, _handle) = project
5850 .update(cx, |p, cx| {
5851 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5852 })
5853 .await
5854 .unwrap();
5855 cx.executor().run_until_parked();
5856
5857 let mut servers_with_hover_requests = HashMap::default();
5858 for i in 0..language_server_names.len() {
5859 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
5860 panic!(
5861 "Failed to get language server #{i} with name {}",
5862 &language_server_names[i]
5863 )
5864 });
5865 let new_server_name = new_server.server.name();
5866 assert!(
5867 !servers_with_hover_requests.contains_key(&new_server_name),
5868 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5869 );
5870 match new_server_name.as_ref() {
5871 "TailwindServer" | "TypeScriptServer" => {
5872 servers_with_hover_requests.insert(
5873 new_server_name.clone(),
5874 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5875 move |_, _| {
5876 let name = new_server_name.clone();
5877 async move {
5878 Ok(Some(lsp::Hover {
5879 contents: lsp::HoverContents::Scalar(
5880 lsp::MarkedString::String(format!("{name} hover")),
5881 ),
5882 range: None,
5883 }))
5884 }
5885 },
5886 ),
5887 );
5888 }
5889 "ESLintServer" => {
5890 servers_with_hover_requests.insert(
5891 new_server_name,
5892 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5893 |_, _| async move { Ok(None) },
5894 ),
5895 );
5896 }
5897 "NoHoverCapabilitiesServer" => {
5898 let _never_handled = new_server
5899 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
5900 panic!(
5901 "Should not call for hovers server with no corresponding capabilities"
5902 )
5903 });
5904 }
5905 unexpected => panic!("Unexpected server name: {unexpected}"),
5906 }
5907 }
5908
5909 let hover_task = project.update(cx, |project, cx| {
5910 project.hover(&buffer, Point::new(0, 0), cx)
5911 });
5912 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
5913 |mut hover_request| async move {
5914 hover_request
5915 .next()
5916 .await
5917 .expect("All hover requests should have been triggered")
5918 },
5919 ))
5920 .await;
5921 assert_eq!(
5922 vec!["TailwindServer hover", "TypeScriptServer hover"],
5923 hover_task
5924 .await
5925 .into_iter()
5926 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5927 .sorted()
5928 .collect::<Vec<_>>(),
5929 "Should receive hover responses from all related servers with hover capabilities"
5930 );
5931}
5932
5933#[gpui::test]
5934async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5935 init_test(cx);
5936
5937 let fs = FakeFs::new(cx.executor());
5938 fs.insert_tree(
5939 path!("/dir"),
5940 json!({
5941 "a.ts": "a",
5942 }),
5943 )
5944 .await;
5945
5946 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5947
5948 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5949 language_registry.add(typescript_lang());
5950 let mut fake_language_servers = language_registry.register_fake_lsp(
5951 "TypeScript",
5952 FakeLspAdapter {
5953 capabilities: lsp::ServerCapabilities {
5954 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5955 ..lsp::ServerCapabilities::default()
5956 },
5957 ..FakeLspAdapter::default()
5958 },
5959 );
5960
5961 let (buffer, _handle) = project
5962 .update(cx, |p, cx| {
5963 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5964 })
5965 .await
5966 .unwrap();
5967 cx.executor().run_until_parked();
5968
5969 let fake_server = fake_language_servers
5970 .next()
5971 .await
5972 .expect("failed to get the language server");
5973
5974 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5975 move |_, _| async move {
5976 Ok(Some(lsp::Hover {
5977 contents: lsp::HoverContents::Array(vec![
5978 lsp::MarkedString::String("".to_string()),
5979 lsp::MarkedString::String(" ".to_string()),
5980 lsp::MarkedString::String("\n\n\n".to_string()),
5981 ]),
5982 range: None,
5983 }))
5984 },
5985 );
5986
5987 let hover_task = project.update(cx, |project, cx| {
5988 project.hover(&buffer, Point::new(0, 0), cx)
5989 });
5990 let () = request_handled
5991 .next()
5992 .await
5993 .expect("All hover requests should have been triggered");
5994 assert_eq!(
5995 Vec::<String>::new(),
5996 hover_task
5997 .await
5998 .into_iter()
5999 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6000 .sorted()
6001 .collect::<Vec<_>>(),
6002 "Empty hover parts should be ignored"
6003 );
6004}
6005
6006#[gpui::test]
6007async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
6008 init_test(cx);
6009
6010 let fs = FakeFs::new(cx.executor());
6011 fs.insert_tree(
6012 path!("/dir"),
6013 json!({
6014 "a.ts": "a",
6015 }),
6016 )
6017 .await;
6018
6019 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6020
6021 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6022 language_registry.add(typescript_lang());
6023 let mut fake_language_servers = language_registry.register_fake_lsp(
6024 "TypeScript",
6025 FakeLspAdapter {
6026 capabilities: lsp::ServerCapabilities {
6027 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6028 ..lsp::ServerCapabilities::default()
6029 },
6030 ..FakeLspAdapter::default()
6031 },
6032 );
6033
6034 let (buffer, _handle) = project
6035 .update(cx, |p, cx| {
6036 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6037 })
6038 .await
6039 .unwrap();
6040 cx.executor().run_until_parked();
6041
6042 let fake_server = fake_language_servers
6043 .next()
6044 .await
6045 .expect("failed to get the language server");
6046
6047 let mut request_handled = fake_server
6048 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
6049 Ok(Some(vec![
6050 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6051 title: "organize imports".to_string(),
6052 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
6053 ..lsp::CodeAction::default()
6054 }),
6055 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6056 title: "fix code".to_string(),
6057 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
6058 ..lsp::CodeAction::default()
6059 }),
6060 ]))
6061 });
6062
6063 let code_actions_task = project.update(cx, |project, cx| {
6064 project.code_actions(
6065 &buffer,
6066 0..buffer.read(cx).len(),
6067 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
6068 cx,
6069 )
6070 });
6071
6072 let () = request_handled
6073 .next()
6074 .await
6075 .expect("The code action request should have been triggered");
6076
6077 let code_actions = code_actions_task.await.unwrap();
6078 assert_eq!(code_actions.len(), 1);
6079 assert_eq!(
6080 code_actions[0].lsp_action.action_kind(),
6081 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
6082 );
6083}
6084
6085#[gpui::test]
6086async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6087 init_test(cx);
6088
6089 let fs = FakeFs::new(cx.executor());
6090 fs.insert_tree(
6091 path!("/dir"),
6092 json!({
6093 "a.tsx": "a",
6094 }),
6095 )
6096 .await;
6097
6098 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6099
6100 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6101 language_registry.add(tsx_lang());
6102 let language_server_names = [
6103 "TypeScriptServer",
6104 "TailwindServer",
6105 "ESLintServer",
6106 "NoActionsCapabilitiesServer",
6107 ];
6108
6109 let mut language_server_rxs = [
6110 language_registry.register_fake_lsp(
6111 "tsx",
6112 FakeLspAdapter {
6113 name: language_server_names[0],
6114 capabilities: lsp::ServerCapabilities {
6115 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6116 ..lsp::ServerCapabilities::default()
6117 },
6118 ..FakeLspAdapter::default()
6119 },
6120 ),
6121 language_registry.register_fake_lsp(
6122 "tsx",
6123 FakeLspAdapter {
6124 name: language_server_names[1],
6125 capabilities: lsp::ServerCapabilities {
6126 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6127 ..lsp::ServerCapabilities::default()
6128 },
6129 ..FakeLspAdapter::default()
6130 },
6131 ),
6132 language_registry.register_fake_lsp(
6133 "tsx",
6134 FakeLspAdapter {
6135 name: language_server_names[2],
6136 capabilities: lsp::ServerCapabilities {
6137 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6138 ..lsp::ServerCapabilities::default()
6139 },
6140 ..FakeLspAdapter::default()
6141 },
6142 ),
6143 language_registry.register_fake_lsp(
6144 "tsx",
6145 FakeLspAdapter {
6146 name: language_server_names[3],
6147 capabilities: lsp::ServerCapabilities {
6148 code_action_provider: None,
6149 ..lsp::ServerCapabilities::default()
6150 },
6151 ..FakeLspAdapter::default()
6152 },
6153 ),
6154 ];
6155
6156 let (buffer, _handle) = project
6157 .update(cx, |p, cx| {
6158 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6159 })
6160 .await
6161 .unwrap();
6162 cx.executor().run_until_parked();
6163
6164 let mut servers_with_actions_requests = HashMap::default();
6165 for i in 0..language_server_names.len() {
6166 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6167 panic!(
6168 "Failed to get language server #{i} with name {}",
6169 &language_server_names[i]
6170 )
6171 });
6172 let new_server_name = new_server.server.name();
6173
6174 assert!(
6175 !servers_with_actions_requests.contains_key(&new_server_name),
6176 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6177 );
6178 match new_server_name.0.as_ref() {
6179 "TailwindServer" | "TypeScriptServer" => {
6180 servers_with_actions_requests.insert(
6181 new_server_name.clone(),
6182 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6183 move |_, _| {
6184 let name = new_server_name.clone();
6185 async move {
6186 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6187 lsp::CodeAction {
6188 title: format!("{name} code action"),
6189 ..lsp::CodeAction::default()
6190 },
6191 )]))
6192 }
6193 },
6194 ),
6195 );
6196 }
6197 "ESLintServer" => {
6198 servers_with_actions_requests.insert(
6199 new_server_name,
6200 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6201 |_, _| async move { Ok(None) },
6202 ),
6203 );
6204 }
6205 "NoActionsCapabilitiesServer" => {
6206 let _never_handled = new_server
6207 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6208 panic!(
6209 "Should not call for code actions server with no corresponding capabilities"
6210 )
6211 });
6212 }
6213 unexpected => panic!("Unexpected server name: {unexpected}"),
6214 }
6215 }
6216
6217 let code_actions_task = project.update(cx, |project, cx| {
6218 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6219 });
6220
6221 // cx.run_until_parked();
6222 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6223 |mut code_actions_request| async move {
6224 code_actions_request
6225 .next()
6226 .await
6227 .expect("All code actions requests should have been triggered")
6228 },
6229 ))
6230 .await;
6231 assert_eq!(
6232 vec!["TailwindServer code action", "TypeScriptServer code action"],
6233 code_actions_task
6234 .await
6235 .unwrap()
6236 .into_iter()
6237 .map(|code_action| code_action.lsp_action.title().to_owned())
6238 .sorted()
6239 .collect::<Vec<_>>(),
6240 "Should receive code actions responses from all related servers with hover capabilities"
6241 );
6242}
6243
6244#[gpui::test]
6245async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6246 init_test(cx);
6247
6248 let fs = FakeFs::new(cx.executor());
6249 fs.insert_tree(
6250 "/dir",
6251 json!({
6252 "a.rs": "let a = 1;",
6253 "b.rs": "let b = 2;",
6254 "c.rs": "let c = 2;",
6255 }),
6256 )
6257 .await;
6258
6259 let project = Project::test(
6260 fs,
6261 [
6262 "/dir/a.rs".as_ref(),
6263 "/dir/b.rs".as_ref(),
6264 "/dir/c.rs".as_ref(),
6265 ],
6266 cx,
6267 )
6268 .await;
6269
6270 // check the initial state and get the worktrees
6271 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6272 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6273 assert_eq!(worktrees.len(), 3);
6274
6275 let worktree_a = worktrees[0].read(cx);
6276 let worktree_b = worktrees[1].read(cx);
6277 let worktree_c = worktrees[2].read(cx);
6278
6279 // check they start in the right order
6280 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6281 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6282 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6283
6284 (
6285 worktrees[0].clone(),
6286 worktrees[1].clone(),
6287 worktrees[2].clone(),
6288 )
6289 });
6290
6291 // move first worktree to after the second
6292 // [a, b, c] -> [b, a, c]
6293 project
6294 .update(cx, |project, cx| {
6295 let first = worktree_a.read(cx);
6296 let second = worktree_b.read(cx);
6297 project.move_worktree(first.id(), second.id(), cx)
6298 })
6299 .expect("moving first after second");
6300
6301 // check the state after moving
6302 project.update(cx, |project, cx| {
6303 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6304 assert_eq!(worktrees.len(), 3);
6305
6306 let first = worktrees[0].read(cx);
6307 let second = worktrees[1].read(cx);
6308 let third = worktrees[2].read(cx);
6309
6310 // check they are now in the right order
6311 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6312 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6313 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6314 });
6315
6316 // move the second worktree to before the first
6317 // [b, a, c] -> [a, b, c]
6318 project
6319 .update(cx, |project, cx| {
6320 let second = worktree_a.read(cx);
6321 let first = worktree_b.read(cx);
6322 project.move_worktree(first.id(), second.id(), cx)
6323 })
6324 .expect("moving second before first");
6325
6326 // check the state after moving
6327 project.update(cx, |project, cx| {
6328 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6329 assert_eq!(worktrees.len(), 3);
6330
6331 let first = worktrees[0].read(cx);
6332 let second = worktrees[1].read(cx);
6333 let third = worktrees[2].read(cx);
6334
6335 // check they are now in the right order
6336 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6337 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6338 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6339 });
6340
6341 // move the second worktree to after the third
6342 // [a, b, c] -> [a, c, b]
6343 project
6344 .update(cx, |project, cx| {
6345 let second = worktree_b.read(cx);
6346 let third = worktree_c.read(cx);
6347 project.move_worktree(second.id(), third.id(), cx)
6348 })
6349 .expect("moving second after third");
6350
6351 // check the state after moving
6352 project.update(cx, |project, cx| {
6353 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6354 assert_eq!(worktrees.len(), 3);
6355
6356 let first = worktrees[0].read(cx);
6357 let second = worktrees[1].read(cx);
6358 let third = worktrees[2].read(cx);
6359
6360 // check they are now in the right order
6361 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6362 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6363 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6364 });
6365
6366 // move the third worktree to before the second
6367 // [a, c, b] -> [a, b, c]
6368 project
6369 .update(cx, |project, cx| {
6370 let third = worktree_c.read(cx);
6371 let second = worktree_b.read(cx);
6372 project.move_worktree(third.id(), second.id(), cx)
6373 })
6374 .expect("moving third before second");
6375
6376 // check the state after moving
6377 project.update(cx, |project, cx| {
6378 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6379 assert_eq!(worktrees.len(), 3);
6380
6381 let first = worktrees[0].read(cx);
6382 let second = worktrees[1].read(cx);
6383 let third = worktrees[2].read(cx);
6384
6385 // check they are now in the right order
6386 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6387 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6388 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6389 });
6390
6391 // move the first worktree to after the third
6392 // [a, b, c] -> [b, c, a]
6393 project
6394 .update(cx, |project, cx| {
6395 let first = worktree_a.read(cx);
6396 let third = worktree_c.read(cx);
6397 project.move_worktree(first.id(), third.id(), cx)
6398 })
6399 .expect("moving first after third");
6400
6401 // check the state after moving
6402 project.update(cx, |project, cx| {
6403 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6404 assert_eq!(worktrees.len(), 3);
6405
6406 let first = worktrees[0].read(cx);
6407 let second = worktrees[1].read(cx);
6408 let third = worktrees[2].read(cx);
6409
6410 // check they are now in the right order
6411 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6412 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6413 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6414 });
6415
6416 // move the third worktree to before the first
6417 // [b, c, a] -> [a, b, c]
6418 project
6419 .update(cx, |project, cx| {
6420 let third = worktree_a.read(cx);
6421 let first = worktree_b.read(cx);
6422 project.move_worktree(third.id(), first.id(), cx)
6423 })
6424 .expect("moving third before first");
6425
6426 // check the state after moving
6427 project.update(cx, |project, cx| {
6428 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6429 assert_eq!(worktrees.len(), 3);
6430
6431 let first = worktrees[0].read(cx);
6432 let second = worktrees[1].read(cx);
6433 let third = worktrees[2].read(cx);
6434
6435 // check they are now in the right order
6436 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6437 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6438 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6439 });
6440}
6441
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The index (staged) version of the file.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // The on-disk (working-copy) version: adds a comment line and edits the println.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Comparing the buffer against the index yields two hunks: the added
    // comment line and the modified println line.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Update the index so that everything except the println edit is staged.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    // After the index changes, only the println line remains unstaged,
    // appearing as an addition relative to the new base text.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6539
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: HEAD, index, and working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // Seed HEAD and the index. `deletion.rs` exists in both but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // Against HEAD: the comment line is an unstaged addition (it is absent
    // from the index, hence the secondary hunk), while the println edit is
    // already staged.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted from the working copy.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion shows up but is not yet staged (secondary hunk present).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file by removing it from the index.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk no longer has an unstaged (secondary) counterpart.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6719
6720#[gpui::test]
6721async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
6722 use DiffHunkSecondaryStatus::*;
6723 init_test(cx);
6724
6725 let committed_contents = r#"
6726 zero
6727 one
6728 two
6729 three
6730 four
6731 five
6732 "#
6733 .unindent();
6734 let file_contents = r#"
6735 one
6736 TWO
6737 three
6738 FOUR
6739 five
6740 "#
6741 .unindent();
6742
6743 let fs = FakeFs::new(cx.background_executor.clone());
6744 fs.insert_tree(
6745 "/dir",
6746 json!({
6747 ".git": {},
6748 "file.txt": file_contents.clone()
6749 }),
6750 )
6751 .await;
6752
6753 fs.set_head_and_index_for_repo(
6754 "/dir/.git".as_ref(),
6755 &[("file.txt".into(), committed_contents.clone())],
6756 );
6757
6758 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6759
6760 let buffer = project
6761 .update(cx, |project, cx| {
6762 project.open_local_buffer("/dir/file.txt", cx)
6763 })
6764 .await
6765 .unwrap();
6766 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6767 let uncommitted_diff = project
6768 .update(cx, |project, cx| {
6769 project.open_uncommitted_diff(buffer.clone(), cx)
6770 })
6771 .await
6772 .unwrap();
6773 let mut diff_events = cx.events(&uncommitted_diff);
6774
6775 // The hunks are initially unstaged.
6776 uncommitted_diff.read_with(cx, |diff, cx| {
6777 assert_hunks(
6778 diff.hunks(&snapshot, cx),
6779 &snapshot,
6780 &diff.base_text_string().unwrap(),
6781 &[
6782 (
6783 0..0,
6784 "zero\n",
6785 "",
6786 DiffHunkStatus::deleted(HasSecondaryHunk),
6787 ),
6788 (
6789 1..2,
6790 "two\n",
6791 "TWO\n",
6792 DiffHunkStatus::modified(HasSecondaryHunk),
6793 ),
6794 (
6795 3..4,
6796 "four\n",
6797 "FOUR\n",
6798 DiffHunkStatus::modified(HasSecondaryHunk),
6799 ),
6800 ],
6801 );
6802 });
6803
6804 // Stage a hunk. It appears as optimistically staged.
6805 uncommitted_diff.update(cx, |diff, cx| {
6806 let range =
6807 snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
6808 let hunks = diff
6809 .hunks_intersecting_range(range, &snapshot, cx)
6810 .collect::<Vec<_>>();
6811 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
6812
6813 assert_hunks(
6814 diff.hunks(&snapshot, cx),
6815 &snapshot,
6816 &diff.base_text_string().unwrap(),
6817 &[
6818 (
6819 0..0,
6820 "zero\n",
6821 "",
6822 DiffHunkStatus::deleted(HasSecondaryHunk),
6823 ),
6824 (
6825 1..2,
6826 "two\n",
6827 "TWO\n",
6828 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
6829 ),
6830 (
6831 3..4,
6832 "four\n",
6833 "FOUR\n",
6834 DiffHunkStatus::modified(HasSecondaryHunk),
6835 ),
6836 ],
6837 );
6838 });
6839
6840 // The diff emits a change event for the range of the staged hunk.
6841 assert!(matches!(
6842 diff_events.next().await.unwrap(),
6843 BufferDiffEvent::HunksStagedOrUnstaged(_)
6844 ));
6845 let event = diff_events.next().await.unwrap();
6846 if let BufferDiffEvent::DiffChanged {
6847 changed_range: Some(changed_range),
6848 } = event
6849 {
6850 let changed_range = changed_range.to_point(&snapshot);
6851 assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
6852 } else {
6853 panic!("Unexpected event {event:?}");
6854 }
6855
6856 // When the write to the index completes, it appears as staged.
6857 cx.run_until_parked();
6858 uncommitted_diff.update(cx, |diff, cx| {
6859 assert_hunks(
6860 diff.hunks(&snapshot, cx),
6861 &snapshot,
6862 &diff.base_text_string().unwrap(),
6863 &[
6864 (
6865 0..0,
6866 "zero\n",
6867 "",
6868 DiffHunkStatus::deleted(HasSecondaryHunk),
6869 ),
6870 (
6871 1..2,
6872 "two\n",
6873 "TWO\n",
6874 DiffHunkStatus::modified(NoSecondaryHunk),
6875 ),
6876 (
6877 3..4,
6878 "four\n",
6879 "FOUR\n",
6880 DiffHunkStatus::modified(HasSecondaryHunk),
6881 ),
6882 ],
6883 );
6884 });
6885
6886 // The diff emits a change event for the changed index text.
6887 let event = diff_events.next().await.unwrap();
6888 if let BufferDiffEvent::DiffChanged {
6889 changed_range: Some(changed_range),
6890 } = event
6891 {
6892 let changed_range = changed_range.to_point(&snapshot);
6893 assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
6894 } else {
6895 panic!("Unexpected event {event:?}");
6896 }
6897
6898 // Simulate a problem writing to the git index.
6899 fs.set_error_message_for_index_write(
6900 "/dir/.git".as_ref(),
6901 Some("failed to write git index".into()),
6902 );
6903
6904 // Stage another hunk.
6905 uncommitted_diff.update(cx, |diff, cx| {
6906 let range =
6907 snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
6908 let hunks = diff
6909 .hunks_intersecting_range(range, &snapshot, cx)
6910 .collect::<Vec<_>>();
6911 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
6912
6913 assert_hunks(
6914 diff.hunks(&snapshot, cx),
6915 &snapshot,
6916 &diff.base_text_string().unwrap(),
6917 &[
6918 (
6919 0..0,
6920 "zero\n",
6921 "",
6922 DiffHunkStatus::deleted(HasSecondaryHunk),
6923 ),
6924 (
6925 1..2,
6926 "two\n",
6927 "TWO\n",
6928 DiffHunkStatus::modified(NoSecondaryHunk),
6929 ),
6930 (
6931 3..4,
6932 "four\n",
6933 "FOUR\n",
6934 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
6935 ),
6936 ],
6937 );
6938 });
6939 assert!(matches!(
6940 diff_events.next().await.unwrap(),
6941 BufferDiffEvent::HunksStagedOrUnstaged(_)
6942 ));
6943 let event = diff_events.next().await.unwrap();
6944 if let BufferDiffEvent::DiffChanged {
6945 changed_range: Some(changed_range),
6946 } = event
6947 {
6948 let changed_range = changed_range.to_point(&snapshot);
6949 assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
6950 } else {
6951 panic!("Unexpected event {event:?}");
6952 }
6953
6954 // When the write fails, the hunk returns to being unstaged.
6955 cx.run_until_parked();
6956 uncommitted_diff.update(cx, |diff, cx| {
6957 assert_hunks(
6958 diff.hunks(&snapshot, cx),
6959 &snapshot,
6960 &diff.base_text_string().unwrap(),
6961 &[
6962 (
6963 0..0,
6964 "zero\n",
6965 "",
6966 DiffHunkStatus::deleted(HasSecondaryHunk),
6967 ),
6968 (
6969 1..2,
6970 "two\n",
6971 "TWO\n",
6972 DiffHunkStatus::modified(NoSecondaryHunk),
6973 ),
6974 (
6975 3..4,
6976 "four\n",
6977 "FOUR\n",
6978 DiffHunkStatus::modified(HasSecondaryHunk),
6979 ),
6980 ],
6981 );
6982 });
6983
6984 let event = diff_events.next().await.unwrap();
6985 if let BufferDiffEvent::DiffChanged {
6986 changed_range: Some(changed_range),
6987 } = event
6988 {
6989 let changed_range = changed_range.to_point(&snapshot);
6990 assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
6991 } else {
6992 panic!("Unexpected event {event:?}");
6993 }
6994
6995 // Allow writing to the git index to succeed again.
6996 fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);
6997
6998 // Stage two hunks with separate operations.
6999 uncommitted_diff.update(cx, |diff, cx| {
7000 let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
7001 diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
7002 diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
7003 });
7004
7005 // Both staged hunks appear as pending.
7006 uncommitted_diff.update(cx, |diff, cx| {
7007 assert_hunks(
7008 diff.hunks(&snapshot, cx),
7009 &snapshot,
7010 &diff.base_text_string().unwrap(),
7011 &[
7012 (
7013 0..0,
7014 "zero\n",
7015 "",
7016 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7017 ),
7018 (
7019 1..2,
7020 "two\n",
7021 "TWO\n",
7022 DiffHunkStatus::modified(NoSecondaryHunk),
7023 ),
7024 (
7025 3..4,
7026 "four\n",
7027 "FOUR\n",
7028 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7029 ),
7030 ],
7031 );
7032 });
7033
7034 // Both staging operations take effect.
7035 cx.run_until_parked();
7036 uncommitted_diff.update(cx, |diff, cx| {
7037 assert_hunks(
7038 diff.hunks(&snapshot, cx),
7039 &snapshot,
7040 &diff.base_text_string().unwrap(),
7041 &[
7042 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
7043 (
7044 1..2,
7045 "two\n",
7046 "TWO\n",
7047 DiffHunkStatus::modified(NoSecondaryHunk),
7048 ),
7049 (
7050 3..4,
7051 "four\n",
7052 "FOUR\n",
7053 DiffHunkStatus::modified(NoSecondaryHunk),
7054 ),
7055 ],
7056 );
7057 });
7058}
7059
7060#[gpui::test(seeds(340, 472))]
7061async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
7062 use DiffHunkSecondaryStatus::*;
7063 init_test(cx);
7064
7065 let committed_contents = r#"
7066 zero
7067 one
7068 two
7069 three
7070 four
7071 five
7072 "#
7073 .unindent();
7074 let file_contents = r#"
7075 one
7076 TWO
7077 three
7078 FOUR
7079 five
7080 "#
7081 .unindent();
7082
7083 let fs = FakeFs::new(cx.background_executor.clone());
7084 fs.insert_tree(
7085 "/dir",
7086 json!({
7087 ".git": {},
7088 "file.txt": file_contents.clone()
7089 }),
7090 )
7091 .await;
7092
7093 fs.set_head_for_repo(
7094 "/dir/.git".as_ref(),
7095 &[("file.txt".into(), committed_contents.clone())],
7096 "deadbeef",
7097 );
7098 fs.set_index_for_repo(
7099 "/dir/.git".as_ref(),
7100 &[("file.txt".into(), committed_contents.clone())],
7101 );
7102
7103 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7104
7105 let buffer = project
7106 .update(cx, |project, cx| {
7107 project.open_local_buffer("/dir/file.txt", cx)
7108 })
7109 .await
7110 .unwrap();
7111 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7112 let uncommitted_diff = project
7113 .update(cx, |project, cx| {
7114 project.open_uncommitted_diff(buffer.clone(), cx)
7115 })
7116 .await
7117 .unwrap();
7118
7119 // The hunks are initially unstaged.
7120 uncommitted_diff.read_with(cx, |diff, cx| {
7121 assert_hunks(
7122 diff.hunks(&snapshot, cx),
7123 &snapshot,
7124 &diff.base_text_string().unwrap(),
7125 &[
7126 (
7127 0..0,
7128 "zero\n",
7129 "",
7130 DiffHunkStatus::deleted(HasSecondaryHunk),
7131 ),
7132 (
7133 1..2,
7134 "two\n",
7135 "TWO\n",
7136 DiffHunkStatus::modified(HasSecondaryHunk),
7137 ),
7138 (
7139 3..4,
7140 "four\n",
7141 "FOUR\n",
7142 DiffHunkStatus::modified(HasSecondaryHunk),
7143 ),
7144 ],
7145 );
7146 });
7147
7148 // Pause IO events
7149 fs.pause_events();
7150
7151 // Stage the first hunk.
7152 uncommitted_diff.update(cx, |diff, cx| {
7153 let hunk = diff.hunks(&snapshot, cx).next().unwrap();
7154 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
7155 assert_hunks(
7156 diff.hunks(&snapshot, cx),
7157 &snapshot,
7158 &diff.base_text_string().unwrap(),
7159 &[
7160 (
7161 0..0,
7162 "zero\n",
7163 "",
7164 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7165 ),
7166 (
7167 1..2,
7168 "two\n",
7169 "TWO\n",
7170 DiffHunkStatus::modified(HasSecondaryHunk),
7171 ),
7172 (
7173 3..4,
7174 "four\n",
7175 "FOUR\n",
7176 DiffHunkStatus::modified(HasSecondaryHunk),
7177 ),
7178 ],
7179 );
7180 });
7181
7182 // Stage the second hunk *before* receiving the FS event for the first hunk.
7183 cx.run_until_parked();
7184 uncommitted_diff.update(cx, |diff, cx| {
7185 let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
7186 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
7187 assert_hunks(
7188 diff.hunks(&snapshot, cx),
7189 &snapshot,
7190 &diff.base_text_string().unwrap(),
7191 &[
7192 (
7193 0..0,
7194 "zero\n",
7195 "",
7196 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7197 ),
7198 (
7199 1..2,
7200 "two\n",
7201 "TWO\n",
7202 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7203 ),
7204 (
7205 3..4,
7206 "four\n",
7207 "FOUR\n",
7208 DiffHunkStatus::modified(HasSecondaryHunk),
7209 ),
7210 ],
7211 );
7212 });
7213
7214 // Process the FS event for staging the first hunk (second event is still pending).
7215 fs.flush_events(1);
7216 cx.run_until_parked();
7217
7218 // Stage the third hunk before receiving the second FS event.
7219 uncommitted_diff.update(cx, |diff, cx| {
7220 let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
7221 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
7222 });
7223
7224 // Wait for all remaining IO.
7225 cx.run_until_parked();
7226 fs.flush_events(fs.buffered_event_count());
7227
7228 // Now all hunks are staged.
7229 cx.run_until_parked();
7230 uncommitted_diff.update(cx, |diff, cx| {
7231 assert_hunks(
7232 diff.hunks(&snapshot, cx),
7233 &snapshot,
7234 &diff.base_text_string().unwrap(),
7235 &[
7236 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
7237 (
7238 1..2,
7239 "two\n",
7240 "TWO\n",
7241 DiffHunkStatus::modified(NoSecondaryHunk),
7242 ),
7243 (
7244 3..4,
7245 "four\n",
7246 "FOUR\n",
7247 DiffHunkStatus::modified(NoSecondaryHunk),
7248 ),
7249 ],
7250 );
7251 });
7252}
7253
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Randomized stress test: stage and unstage hunks in random order with
    // random yields in between, then verify that once all index writes have
    // settled, each hunk's staged state matches the last operation applied
    // to it.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.gen_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line differs from HEAD/index, producing 6 modified hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        // Pick a random hunk and toggle its staged state, recording the
        // expected pending status in our local copy of the hunk list.
        let hunk_ix = rng.gen_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times so in-flight index writes and diff
        // recalculations can interleave with subsequent operations.
        for _ in 0..rng.gen_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending state should have resolved to
    // its corresponding final state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7372
7373#[gpui::test]
7374async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7375 init_test(cx);
7376
7377 let committed_contents = r#"
7378 fn main() {
7379 println!("hello from HEAD");
7380 }
7381 "#
7382 .unindent();
7383 let file_contents = r#"
7384 fn main() {
7385 println!("hello from the working copy");
7386 }
7387 "#
7388 .unindent();
7389
7390 let fs = FakeFs::new(cx.background_executor.clone());
7391 fs.insert_tree(
7392 "/dir",
7393 json!({
7394 ".git": {},
7395 "src": {
7396 "main.rs": file_contents,
7397 }
7398 }),
7399 )
7400 .await;
7401
7402 fs.set_head_for_repo(
7403 Path::new("/dir/.git"),
7404 &[("src/main.rs".into(), committed_contents.clone())],
7405 "deadbeef",
7406 );
7407 fs.set_index_for_repo(
7408 Path::new("/dir/.git"),
7409 &[("src/main.rs".into(), committed_contents.clone())],
7410 );
7411
7412 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7413
7414 let buffer = project
7415 .update(cx, |project, cx| {
7416 project.open_local_buffer("/dir/src/main.rs", cx)
7417 })
7418 .await
7419 .unwrap();
7420 let uncommitted_diff = project
7421 .update(cx, |project, cx| {
7422 project.open_uncommitted_diff(buffer.clone(), cx)
7423 })
7424 .await
7425 .unwrap();
7426
7427 cx.run_until_parked();
7428 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7429 let snapshot = buffer.read(cx).snapshot();
7430 assert_hunks(
7431 uncommitted_diff.hunks(&snapshot, cx),
7432 &snapshot,
7433 &uncommitted_diff.base_text_string().unwrap(),
7434 &[(
7435 1..2,
7436 " println!(\"hello from HEAD\");\n",
7437 " println!(\"hello from the working copy\");\n",
7438 DiffHunkStatus {
7439 kind: DiffHunkStatusKind::Modified,
7440 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7441 },
7442 )],
7443 );
7444 });
7445}
7446
7447#[gpui::test]
7448async fn test_repository_and_path_for_project_path(
7449 background_executor: BackgroundExecutor,
7450 cx: &mut gpui::TestAppContext,
7451) {
7452 init_test(cx);
7453 let fs = FakeFs::new(background_executor);
7454 fs.insert_tree(
7455 path!("/root"),
7456 json!({
7457 "c.txt": "",
7458 "dir1": {
7459 ".git": {},
7460 "deps": {
7461 "dep1": {
7462 ".git": {},
7463 "src": {
7464 "a.txt": ""
7465 }
7466 }
7467 },
7468 "src": {
7469 "b.txt": ""
7470 }
7471 },
7472 }),
7473 )
7474 .await;
7475
7476 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7477 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7478 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7479 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7480 .await;
7481 cx.run_until_parked();
7482
7483 project.read_with(cx, |project, cx| {
7484 let git_store = project.git_store().read(cx);
7485 let pairs = [
7486 ("c.txt", None),
7487 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
7488 (
7489 "dir1/deps/dep1/src/a.txt",
7490 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
7491 ),
7492 ];
7493 let expected = pairs
7494 .iter()
7495 .map(|(path, result)| {
7496 (
7497 path,
7498 result.map(|(repo, repo_path)| {
7499 (Path::new(repo).into(), RepoPath::from(repo_path))
7500 }),
7501 )
7502 })
7503 .collect::<Vec<_>>();
7504 let actual = pairs
7505 .iter()
7506 .map(|(path, _)| {
7507 let project_path = (tree_id, Path::new(path)).into();
7508 let result = maybe!({
7509 let (repo, repo_path) =
7510 git_store.repository_and_path_for_project_path(&project_path, cx)?;
7511 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
7512 });
7513 (path, result)
7514 })
7515 .collect::<Vec<_>>();
7516 pretty_assertions::assert_eq!(expected, actual);
7517 });
7518
7519 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
7520 .await
7521 .unwrap();
7522 cx.run_until_parked();
7523
7524 project.read_with(cx, |project, cx| {
7525 let git_store = project.git_store().read(cx);
7526 assert_eq!(
7527 git_store.repository_and_path_for_project_path(
7528 &(tree_id, Path::new("dir1/src/b.txt")).into(),
7529 cx
7530 ),
7531 None
7532 );
7533 });
7534}
7535
7536#[gpui::test]
7537async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7538 init_test(cx);
7539 let fs = FakeFs::new(cx.background_executor.clone());
7540 fs.insert_tree(
7541 path!("/root"),
7542 json!({
7543 "home": {
7544 ".git": {},
7545 "project": {
7546 "a.txt": "A"
7547 },
7548 },
7549 }),
7550 )
7551 .await;
7552 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7553
7554 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7555 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7556 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7557 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7558 .await;
7559 tree.flush_fs_events(cx).await;
7560
7561 project.read_with(cx, |project, cx| {
7562 let containing = project
7563 .git_store()
7564 .read(cx)
7565 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7566 assert!(containing.is_none());
7567 });
7568
7569 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7570 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7571 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7572 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7573 .await;
7574 tree.flush_fs_events(cx).await;
7575
7576 project.read_with(cx, |project, cx| {
7577 let containing = project
7578 .git_store()
7579 .read(cx)
7580 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7581 assert_eq!(
7582 containing
7583 .unwrap()
7584 .0
7585 .read(cx)
7586 .work_directory_abs_path
7587 .as_ref(),
7588 Path::new(path!("/root/home"))
7589 );
7590 });
7591}
7592
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    // End-to-end test against a real on-disk git repository: statuses are
    // computed on startup and kept current as files are modified, staged,
    // committed, and deleted.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a tracked, previously unchanged file; it should gain a
    // worktree-modified status.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Stage and commit the tracked changes, clearing their statuses.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7718
7719#[gpui::test]
7720async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
7721 init_test(cx);
7722 cx.executor().allow_parking();
7723
7724 let root = TempTree::new(json!({
7725 "project": {
7726 "sub": {},
7727 "a.txt": "",
7728 },
7729 }));
7730
7731 let work_dir = root.path().join("project");
7732 let repo = git_init(work_dir.as_path());
7733 // a.txt exists in HEAD and the working copy but is deleted in the index.
7734 git_add("a.txt", &repo);
7735 git_commit("Initial commit", &repo);
7736 git_remove_index("a.txt".as_ref(), &repo);
7737 // `sub` is a nested git repository.
7738 let _sub = git_init(&work_dir.join("sub"));
7739
7740 let project = Project::test(
7741 Arc::new(RealFs::new(None, cx.executor())),
7742 [root.path()],
7743 cx,
7744 )
7745 .await;
7746
7747 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7748 tree.flush_fs_events(cx).await;
7749 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7750 .await;
7751 cx.executor().run_until_parked();
7752
7753 let repository = project.read_with(cx, |project, cx| {
7754 project
7755 .repositories(cx)
7756 .values()
7757 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
7758 .unwrap()
7759 .clone()
7760 });
7761
7762 repository.read_with(cx, |repository, _cx| {
7763 let entries = repository.cached_status().collect::<Vec<_>>();
7764
7765 // `sub` doesn't appear in our computed statuses.
7766 // a.txt appears with a combined `DA` status.
7767 assert_eq!(
7768 entries,
7769 [StatusEntry {
7770 repo_path: "a.txt".into(),
7771 status: TrackedStatus {
7772 index_status: StatusCode::Deleted,
7773 worktree_status: StatusCode::Added
7774 }
7775 .into(),
7776 }]
7777 )
7778 });
7779}
7780
7781#[gpui::test]
7782async fn test_repository_subfolder_git_status(
7783 executor: gpui::BackgroundExecutor,
7784 cx: &mut gpui::TestAppContext,
7785) {
7786 init_test(cx);
7787
7788 let fs = FakeFs::new(executor);
7789 fs.insert_tree(
7790 path!("/root"),
7791 json!({
7792 "my-repo": {
7793 ".git": {},
7794 "a.txt": "a",
7795 "sub-folder-1": {
7796 "sub-folder-2": {
7797 "c.txt": "cc",
7798 "d": {
7799 "e.txt": "eee"
7800 }
7801 },
7802 }
7803 },
7804 }),
7805 )
7806 .await;
7807
7808 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
7809 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
7810
7811 fs.set_status_for_repo(
7812 path!("/root/my-repo/.git").as_ref(),
7813 &[(E_TXT.as_ref(), FileStatus::Untracked)],
7814 );
7815
7816 let project = Project::test(
7817 fs.clone(),
7818 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
7819 cx,
7820 )
7821 .await;
7822
7823 project
7824 .update(cx, |project, cx| project.git_scans_complete(cx))
7825 .await;
7826 cx.run_until_parked();
7827
7828 let repository = project.read_with(cx, |project, cx| {
7829 project.repositories(cx).values().next().unwrap().clone()
7830 });
7831
7832 // Ensure that the git status is loaded correctly
7833 repository.read_with(cx, |repository, _cx| {
7834 assert_eq!(
7835 repository.work_directory_abs_path,
7836 Path::new(path!("/root/my-repo")).into()
7837 );
7838
7839 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
7840 assert_eq!(
7841 repository.status_for_path(&E_TXT.into()).unwrap().status,
7842 FileStatus::Untracked
7843 );
7844 });
7845
7846 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
7847 project
7848 .update(cx, |project, cx| project.git_scans_complete(cx))
7849 .await;
7850 cx.run_until_parked();
7851
7852 repository.read_with(cx, |repository, _cx| {
7853 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
7854 assert_eq!(repository.status_for_path(&E_TXT.into()), None);
7855 });
7856}
7857
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    // Verifies that a conflicted cherry-pick surfaces the conflicted path in
    // `Repository::merge_conflicts`, and that resolving the cherry-pick
    // (committing and removing CHERRY_PICK_HEAD) clears the conflict list.
    // NOTE: `#[cfg(any())]` above disables this test entirely (see TODO).
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create divergent edits to a.txt on two branches, then cherry-pick one
    // onto the other to produce a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git actually entered the conflicted cherry-pick state.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();
    // The conflicted path should now be reported by the repository.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After resolution, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
7938
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies that rewriting .gitignore re-evaluates which entries are
    // ignored, and that a previously ignored file can then pick up an
    // index (staged) status.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // a.xml is now ignored; b.txt is no longer ignored and shows as Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8005
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory that some program already has open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
8011#[gpui::test]
8012#[cfg_attr(target_os = "windows", ignore)]
8013async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
8014 init_test(cx);
8015 cx.executor().allow_parking();
8016 let root = TempTree::new(json!({
8017 "projects": {
8018 "project1": {
8019 "a": "",
8020 "b": "",
8021 }
8022 },
8023
8024 }));
8025 let root_path = root.path();
8026
8027 let repo = git_init(&root_path.join("projects/project1"));
8028 git_add("a", &repo);
8029 git_commit("init", &repo);
8030 std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();
8031
8032 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
8033
8034 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8035 tree.flush_fs_events(cx).await;
8036 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
8037 .await;
8038 cx.executor().run_until_parked();
8039
8040 let repository = project.read_with(cx, |project, cx| {
8041 project.repositories(cx).values().next().unwrap().clone()
8042 });
8043
8044 repository.read_with(cx, |repository, _| {
8045 assert_eq!(
8046 repository.work_directory_abs_path.as_ref(),
8047 root_path.join("projects/project1").as_path()
8048 );
8049 assert_eq!(
8050 repository
8051 .status_for_path(&"a".into())
8052 .map(|entry| entry.status),
8053 Some(StatusCode::Modified.worktree()),
8054 );
8055 assert_eq!(
8056 repository
8057 .status_for_path(&"b".into())
8058 .map(|entry| entry.status),
8059 Some(FileStatus::Untracked),
8060 );
8061 });
8062
8063 std::fs::rename(
8064 root_path.join("projects/project1"),
8065 root_path.join("projects/project2"),
8066 )
8067 .unwrap();
8068 tree.flush_fs_events(cx).await;
8069
8070 repository.read_with(cx, |repository, _| {
8071 assert_eq!(
8072 repository.work_directory_abs_path.as_ref(),
8073 root_path.join("projects/project2").as_path()
8074 );
8075 assert_eq!(
8076 repository.status_for_path(&"a".into()).unwrap().status,
8077 StatusCode::Modified.worktree(),
8078 );
8079 assert_eq!(
8080 repository.status_for_path(&"b".into()).unwrap().status,
8081 FileStatus::Untracked,
8082 );
8083 });
8084}
8085
// NOTE: This test always fails on Windows because, unlike on Unix, Windows
// refuses to rename a directory that some program already has open. This is a
// limitation of Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    // End-to-end check that the worktree keeps git file statuses in sync with
    // a real repository through modifications, commits, resets, stashes,
    // .gitignore edits, deletions, and directory renames.
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup: the two files
    // that were never added ("b.txt" and "f.txt") show up as untracked.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed: committed
    // files report no status at all.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked: "a.txt" was stashed,
    // "b.txt" was removed from the index, "e.txt" was modified on disk.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete a file, delete a whole directory, and start ignoring "f.txt".
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A file created inside a brand-new nested directory is reported untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming the top-level parent directory must carry the status over to
    // the file's new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8273
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that repositories are only discovered via *visible* worktrees:
    // adding an invisible (single-file) worktree must not surface additional
    // repositories such as the parent `/root/dir1/.git`.
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    visible_worktree
        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;

    // Only the repo rooted in the visible worktree is reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add an invisible worktree for a single file inside the outer repository.
    let (invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    invisible_worktree
        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;

    // The repository list is unchanged: the invisible worktree contributes none.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8335
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies that ignored/non-ignored state and index status stay correct as
    // files are created in tracked, ancestor-ignored, and ignored directories.
    init_test(cx);
    // Clear file-scan exclusions so ignored entries are still scanned.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: the tracked file is clean, the file ignored by an
    // ancestor .gitignore is not marked ignored inside the repo, and the file
    // in ignored-dir is ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create one new file in each of the three locations, staging only the
    // tracked one in the index.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    // The staged file is Added; the others mirror the initial expectations,
    // and the .git directory itself is always treated as ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8470
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Verifies that linked git worktrees (`.git` file pointing into
    // `.git/worktrees/...`) and submodules (`.git` file pointing into
    // `.git/modules/...`) are each discovered as separate repositories and
    // have their statuses refreshed on git events.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| {
        project
            .worktrees(cx)
            .next()
            .unwrap()
            .read(cx)
            .as_local()
            .unwrap()
            .scan_complete()
    });
    scan_complete.await;

    // All three repositories are discovered: the main repo, the linked
    // worktree, and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer resolves to the linked worktree's repository, not the outer repo.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // HEAD/index say "b" but the file on disk says "B", so it's modified.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
8629
8630#[gpui::test]
8631async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
8632 init_test(cx);
8633 let fs = FakeFs::new(cx.background_executor.clone());
8634 fs.insert_tree(
8635 path!("/root"),
8636 json!({
8637 "project": {
8638 ".git": {},
8639 "child1": {
8640 "a.txt": "A",
8641 },
8642 "child2": {
8643 "b.txt": "B",
8644 }
8645 }
8646 }),
8647 )
8648 .await;
8649
8650 let project = Project::test(
8651 fs.clone(),
8652 [
8653 path!("/root/project/child1").as_ref(),
8654 path!("/root/project/child2").as_ref(),
8655 ],
8656 cx,
8657 )
8658 .await;
8659
8660 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8661 tree.flush_fs_events(cx).await;
8662 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
8663 .await;
8664 cx.executor().run_until_parked();
8665
8666 let repos = project.read_with(cx, |project, cx| {
8667 project
8668 .repositories(cx)
8669 .values()
8670 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
8671 .collect::<Vec<_>>()
8672 });
8673 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
8674}
8675
8676async fn search(
8677 project: &Entity<Project>,
8678 query: SearchQuery,
8679 cx: &mut gpui::TestAppContext,
8680) -> Result<HashMap<String, Vec<Range<usize>>>> {
8681 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8682 let mut results = HashMap::default();
8683 while let Ok(search_result) = search_rx.recv().await {
8684 match search_result {
8685 SearchResult::Buffer { buffer, ranges } => {
8686 results.entry(buffer).or_insert(ranges);
8687 }
8688 SearchResult::LimitReached => {}
8689 }
8690 }
8691 Ok(results
8692 .into_iter()
8693 .map(|(buffer, ranges)| {
8694 buffer.update(cx, |buffer, cx| {
8695 let path = buffer
8696 .file()
8697 .unwrap()
8698 .full_path(cx)
8699 .to_string_lossy()
8700 .to_string();
8701 let ranges = ranges
8702 .into_iter()
8703 .map(|range| range.to_offset(buffer))
8704 .collect::<Vec<_>>();
8705 (path, ranges)
8706 })
8707 })
8708 .collect())
8709}
8710
8711pub fn init_test(cx: &mut gpui::TestAppContext) {
8712 zlog::init_test();
8713
8714 cx.update(|cx| {
8715 let settings_store = SettingsStore::test(cx);
8716 cx.set_global(settings_store);
8717 release_channel::init(SemanticVersion::default(), cx);
8718 language::init(cx);
8719 Project::init_settings(cx);
8720 });
8721}
8722
8723fn json_lang() -> Arc<Language> {
8724 Arc::new(Language::new(
8725 LanguageConfig {
8726 name: "JSON".into(),
8727 matcher: LanguageMatcher {
8728 path_suffixes: vec!["json".to_string()],
8729 ..Default::default()
8730 },
8731 ..Default::default()
8732 },
8733 None,
8734 ))
8735}
8736
8737fn js_lang() -> Arc<Language> {
8738 Arc::new(Language::new(
8739 LanguageConfig {
8740 name: "JavaScript".into(),
8741 matcher: LanguageMatcher {
8742 path_suffixes: vec!["js".to_string()],
8743 ..Default::default()
8744 },
8745 ..Default::default()
8746 },
8747 None,
8748 ))
8749}
8750
8751fn rust_lang() -> Arc<Language> {
8752 Arc::new(Language::new(
8753 LanguageConfig {
8754 name: "Rust".into(),
8755 matcher: LanguageMatcher {
8756 path_suffixes: vec!["rs".to_string()],
8757 ..Default::default()
8758 },
8759 ..Default::default()
8760 },
8761 Some(tree_sitter_rust::LANGUAGE.into()),
8762 ))
8763}
8764
8765fn typescript_lang() -> Arc<Language> {
8766 Arc::new(Language::new(
8767 LanguageConfig {
8768 name: "TypeScript".into(),
8769 matcher: LanguageMatcher {
8770 path_suffixes: vec!["ts".to_string()],
8771 ..Default::default()
8772 },
8773 ..Default::default()
8774 },
8775 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8776 ))
8777}
8778
8779fn tsx_lang() -> Arc<Language> {
8780 Arc::new(Language::new(
8781 LanguageConfig {
8782 name: "tsx".into(),
8783 matcher: LanguageMatcher {
8784 path_suffixes: vec!["tsx".to_string()],
8785 ..Default::default()
8786 },
8787 ..Default::default()
8788 },
8789 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8790 ))
8791}
8792
8793fn get_all_tasks(
8794 project: &Entity<Project>,
8795 task_contexts: &TaskContexts,
8796 cx: &mut App,
8797) -> Vec<(TaskSourceKind, ResolvedTask)> {
8798 let (mut old, new) = project.update(cx, |project, cx| {
8799 project
8800 .task_store
8801 .read(cx)
8802 .task_inventory()
8803 .unwrap()
8804 .read(cx)
8805 .used_and_current_resolved_tasks(task_contexts, cx)
8806 });
8807 old.extend(new);
8808 old
8809}
8810
8811#[track_caller]
8812fn assert_entry_git_state(
8813 tree: &Worktree,
8814 repository: &Repository,
8815 path: &str,
8816 index_status: Option<StatusCode>,
8817 is_ignored: bool,
8818) {
8819 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
8820 let entry = tree
8821 .entry_for_path(path)
8822 .unwrap_or_else(|| panic!("entry {path} not found"));
8823 let status = repository
8824 .status_for_path(&path.into())
8825 .map(|entry| entry.status);
8826 let expected = index_status.map(|index_status| {
8827 TrackedStatus {
8828 index_status,
8829 worktree_status: StatusCode::Unmodified,
8830 }
8831 .into()
8832 });
8833 assert_eq!(
8834 status, expected,
8835 "expected {path} to have git status: {expected:?}"
8836 );
8837 assert_eq!(
8838 entry.is_ignored, is_ignored,
8839 "expected {path} to have is_ignored: {is_ignored}"
8840 );
8841}
8842
8843#[track_caller]
8844fn git_init(path: &Path) -> git2::Repository {
8845 let mut init_opts = RepositoryInitOptions::new();
8846 init_opts.initial_head("main");
8847 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
8848}
8849
8850#[track_caller]
8851fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
8852 let path = path.as_ref();
8853 let mut index = repo.index().expect("Failed to get index");
8854 index.add_path(path).expect("Failed to add file");
8855 index.write().expect("Failed to write index");
8856}
8857
8858#[track_caller]
8859fn git_remove_index(path: &Path, repo: &git2::Repository) {
8860 let mut index = repo.index().expect("Failed to get index");
8861 index.remove_path(path).expect("Failed to add file");
8862 index.write().expect("Failed to write index");
8863}
8864
8865#[track_caller]
8866fn git_commit(msg: &'static str, repo: &git2::Repository) {
8867 use git2::Signature;
8868
8869 let signature = Signature::now("test", "test@zed.dev").unwrap();
8870 let oid = repo.index().unwrap().write_tree().unwrap();
8871 let tree = repo.find_tree(oid).unwrap();
8872 if let Ok(head) = repo.head() {
8873 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
8874
8875 let parent_commit = parent_obj.as_commit().unwrap();
8876
8877 repo.commit(
8878 Some("HEAD"),
8879 &signature,
8880 &signature,
8881 msg,
8882 &tree,
8883 &[parent_commit],
8884 )
8885 .expect("Failed to commit with parent");
8886 } else {
8887 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
8888 .expect("Failed to commit");
8889 }
8890}
8891
/// Cherry-picks `commit` onto the current HEAD.
/// Compiled out via `#[cfg(any())]` (an always-false cfg); kept around for
/// ad-hoc debugging of these tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
8897
8898#[track_caller]
8899fn git_stash(repo: &mut git2::Repository) {
8900 use git2::Signature;
8901
8902 let signature = Signature::now("test", "test@zed.dev").unwrap();
8903 repo.stash_save(&signature, "N/A", None)
8904 .expect("Failed to stash");
8905}
8906
8907#[track_caller]
8908fn git_reset(offset: usize, repo: &git2::Repository) {
8909 let head = repo.head().expect("Couldn't get repo head");
8910 let object = head.peel(git2::ObjectType::Commit).unwrap();
8911 let commit = object.as_commit().unwrap();
8912 let new_head = commit
8913 .parents()
8914 .inspect(|parnet| {
8915 parnet.message();
8916 })
8917 .nth(offset)
8918 .expect("Not enough history");
8919 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
8920 .expect("Could not reset");
8921}
8922
/// Creates branch `name` pointing at the current HEAD commit.
/// Compiled out via `#[cfg(any())]` (an always-false cfg); kept around for
/// ad-hoc debugging of these tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Bug fix: the panic message previously said "Failed to commit",
    // copy-pasted from `git_commit`.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
8933
/// Points HEAD at `name` (e.g. "refs/heads/foo") and checks it out.
/// Compiled out via `#[cfg(any())]` (an always-false cfg); kept around for
/// ad-hoc debugging of these tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
8940
/// Collects the repository's status entries into a path → status map.
/// Compiled out via `#[cfg(any())]` (an always-false cfg); kept around for
/// ad-hoc debugging of these tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
8950
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // `find_project_path` should resolve absolute paths to the correct
    // (worktree id, relative path) pair, including for files that don't exist
    // yet, and return None for paths outside every worktree.
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    for worktree in project.read_with(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>()) {
        worktree
            .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
            .await;
    }
    cx.run_until_parked();

    // Capture each worktree's absolute path and id for the assertions below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at a worktree root resolves to that worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        // Nested files resolve with their full worktree-relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        // Files in the second worktree resolve to the second worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // A nonexistent file still resolves as long as it's inside a worktree.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}