1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
9 DiffHunkStatusKind, assert_hunks,
10};
11use fs::FakeFs;
12use futures::{StreamExt, future};
13use git::{
14 GitHostingProviderRegistry,
15 repository::RepoPath,
16 status::{StatusCode, TrackedStatus},
17};
18use git2::RepositoryInitOptions;
19use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
20use http_client::Url;
21use language::{
22 Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
23 LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
24 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
25 tree_sitter_rust, tree_sitter_typescript,
26};
27use lsp::{
28 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
29 WillRenameFiles, notification::DidRenameFiles,
30};
31use parking_lot::Mutex;
32use paths::{config_dir, tasks_file};
33use postage::stream::Stream as _;
34use pretty_assertions::{assert_eq, assert_matches};
35use rand::{Rng as _, rngs::StdRng};
36use serde_json::json;
37#[cfg(not(windows))]
38use std::os;
39use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
40use task::{ResolvedTask, TaskContext};
41use unindent::Unindent as _;
42use util::{
43 TryFutureExt as _, assert_set_eq, maybe, path,
44 paths::PathMatcher,
45 test::{TempTree, marked_text_offsets},
46 uri,
47};
48use worktree::WorktreeModelHandle as _;
49
50#[gpui::test]
51async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
52 cx.executor().allow_parking();
53
54 let (tx, mut rx) = futures::channel::mpsc::unbounded();
55 let _thread = std::thread::spawn(move || {
56 #[cfg(not(target_os = "windows"))]
57 std::fs::metadata("/tmp").unwrap();
58 #[cfg(target_os = "windows")]
59 std::fs::metadata("C:/Windows").unwrap();
60 std::thread::sleep(Duration::from_millis(1000));
61 tx.unbounded_send(1).unwrap();
62 });
63 rx.next().await.unwrap();
64}
65
66#[gpui::test]
67async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
68 cx.executor().allow_parking();
69
70 let io_task = smol::unblock(move || {
71 println!("sleeping on thread {:?}", std::thread::current().id());
72 std::thread::sleep(Duration::from_millis(10));
73 1
74 });
75
76 let task = cx.foreground_executor().spawn(async move {
77 io_task.await;
78 });
79
80 task.await;
81}
82
83#[cfg(not(windows))]
84#[gpui::test]
85async fn test_symlinks(cx: &mut gpui::TestAppContext) {
86 init_test(cx);
87 cx.executor().allow_parking();
88
89 let dir = TempTree::new(json!({
90 "root": {
91 "apple": "",
92 "banana": {
93 "carrot": {
94 "date": "",
95 "endive": "",
96 }
97 },
98 "fennel": {
99 "grape": "",
100 }
101 }
102 }));
103
104 let root_link_path = dir.path().join("root_link");
105 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
106 os::unix::fs::symlink(
107 dir.path().join("root/fennel"),
108 dir.path().join("root/finnochio"),
109 )
110 .unwrap();
111
112 let project = Project::test(
113 Arc::new(RealFs::new(None, cx.executor())),
114 [root_link_path.as_ref()],
115 cx,
116 )
117 .await;
118
119 project.update(cx, |project, cx| {
120 let tree = project.worktrees(cx).next().unwrap().read(cx);
121 assert_eq!(tree.file_count(), 5);
122 assert_eq!(
123 tree.inode_for_path("fennel/grape"),
124 tree.inode_for_path("finnochio/grape")
125 );
126 });
127}
128
/// Settings from `.editorconfig` files override `.zed/settings.json`, and a
/// nested `.editorconfig` overrides its ancestors for the files it matches.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp directory into a FakeFs so the project can load it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json is not matched by the .editorconfig glob "*.rs", so the
        // .zed/settings.json tab_size of 8 applies.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
218
/// Custom git hosting providers declared in `.zed/settings.json` are added to
/// the global `GitHostingProviderRegistry`, and removed again when the
/// setting is deleted.
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // Once the project settings are loaded, the custom provider is registered.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Overwrite the settings file with an empty object to drop the setting.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    // The provider registered from the deleted setting is gone again.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
283
/// Worktree `.zed` directories contribute both settings and tasks: nested
/// `.zed` directories override their ancestors' settings, worktree tasks are
/// listed alongside global tasks, and recently-scheduled tasks sort first.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the worktree context only.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // The task source backed by the worktree root's `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // a/a.rs inherits the root settings; b/b.rs gets the nested override.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files contribute; neither has been used yet.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root `.zed` task as recently scheduled, and add a task from the
    // user's global tasks.json file.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first; the new global task is
    // appended after the worktree tasks, with its env carried through.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
492
493#[gpui::test]
494async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
495 init_test(cx);
496 TaskStore::init(None);
497
498 let fs = FakeFs::new(cx.executor());
499 fs.insert_tree(
500 path!("/dir"),
501 json!({
502 ".zed": {
503 "tasks.json": r#"[{
504 "label": "test worktree root",
505 "command": "echo $ZED_WORKTREE_ROOT"
506 }]"#,
507 },
508 "a": {
509 "a.rs": "fn a() {\n A\n}"
510 },
511 }),
512 )
513 .await;
514
515 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
516 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
517
518 cx.executor().run_until_parked();
519 let worktree_id = cx.update(|cx| {
520 project.update(cx, |project, cx| {
521 project.worktrees(cx).next().unwrap().read(cx).id()
522 })
523 });
524
525 let active_non_worktree_item_tasks = cx
526 .update(|cx| {
527 get_all_tasks(
528 &project,
529 Arc::new(TaskContexts {
530 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
531 active_worktree_context: None,
532 other_worktree_contexts: Vec::new(),
533 lsp_task_sources: HashMap::default(),
534 latest_selection: None,
535 }),
536 cx,
537 )
538 })
539 .await;
540 assert!(
541 active_non_worktree_item_tasks.is_empty(),
542 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
543 );
544
545 let active_worktree_tasks = cx
546 .update(|cx| {
547 get_all_tasks(
548 &project,
549 Arc::new(TaskContexts {
550 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
551 active_worktree_context: Some((worktree_id, {
552 let mut worktree_context = TaskContext::default();
553 worktree_context
554 .task_variables
555 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
556 worktree_context
557 })),
558 other_worktree_contexts: Vec::new(),
559 lsp_task_sources: HashMap::default(),
560 latest_selection: None,
561 }),
562 cx,
563 )
564 })
565 .await;
566 assert_eq!(
567 active_worktree_tasks
568 .into_iter()
569 .map(|(source_kind, task)| {
570 let resolved = task.resolved;
571 (source_kind, resolved.command)
572 })
573 .collect::<Vec<_>>(),
574 vec![(
575 TaskSourceKind::Worktree {
576 id: worktree_id,
577 directory_in_worktree: PathBuf::from(path!(".zed")),
578 id_base: if cfg!(windows) {
579 "local worktree tasks from directory \".zed\"".into()
580 } else {
581 "local worktree tasks from directory \".zed\"".into()
582 },
583 },
584 "echo /dir".to_string(),
585 )]
586 );
587}
588
/// End-to-end lifecycle of language servers for a project: servers start when
/// a matching buffer opens, buffers are (re)configured from server
/// capabilities, edits/saves/renames are routed only to servers matching the
/// buffer's language, and restarting servers reopens the relevant documents.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server: completions triggered on "." and "::", save notifications on.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server: completions triggered on ":", save notifications on.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    // A same-language rename appears to the server as a close of the old path
    // followed by an open of the new one.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
990
991#[gpui::test]
992async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
993 init_test(cx);
994
995 let fs = FakeFs::new(cx.executor());
996 fs.insert_tree(
997 path!("/the-root"),
998 json!({
999 ".gitignore": "target\n",
1000 "Cargo.lock": "",
1001 "src": {
1002 "a.rs": "",
1003 "b.rs": "",
1004 },
1005 "target": {
1006 "x": {
1007 "out": {
1008 "x.rs": ""
1009 }
1010 },
1011 "y": {
1012 "out": {
1013 "y.rs": "",
1014 }
1015 },
1016 "z": {
1017 "out": {
1018 "z.rs": ""
1019 }
1020 }
1021 }
1022 }),
1023 )
1024 .await;
1025 fs.insert_tree(
1026 path!("/the-registry"),
1027 json!({
1028 "dep1": {
1029 "src": {
1030 "dep1.rs": "",
1031 }
1032 },
1033 "dep2": {
1034 "src": {
1035 "dep2.rs": "",
1036 }
1037 },
1038 }),
1039 )
1040 .await;
1041 fs.insert_tree(
1042 path!("/the/stdlib"),
1043 json!({
1044 "LICENSE": "",
1045 "src": {
1046 "string.rs": "",
1047 }
1048 }),
1049 )
1050 .await;
1051
1052 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1053 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1054 (project.languages().clone(), project.lsp_store())
1055 });
1056 language_registry.add(rust_lang());
1057 let mut fake_servers = language_registry.register_fake_lsp(
1058 "Rust",
1059 FakeLspAdapter {
1060 name: "the-language-server",
1061 ..Default::default()
1062 },
1063 );
1064
1065 cx.executor().run_until_parked();
1066
1067 // Start the language server by opening a buffer with a compatible file extension.
1068 project
1069 .update(cx, |project, cx| {
1070 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1071 })
1072 .await
1073 .unwrap();
1074
1075 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1076 project.update(cx, |project, cx| {
1077 let worktree = project.worktrees(cx).next().unwrap();
1078 assert_eq!(
1079 worktree
1080 .read(cx)
1081 .snapshot()
1082 .entries(true, 0)
1083 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1084 .collect::<Vec<_>>(),
1085 &[
1086 (Path::new(""), false),
1087 (Path::new(".gitignore"), false),
1088 (Path::new("Cargo.lock"), false),
1089 (Path::new("src"), false),
1090 (Path::new("src/a.rs"), false),
1091 (Path::new("src/b.rs"), false),
1092 (Path::new("target"), true),
1093 ]
1094 );
1095 });
1096
1097 let prev_read_dir_count = fs.read_dir_call_count();
1098
1099 let fake_server = fake_servers.next().await.unwrap();
1100 let (server_id, server_name) = lsp_store.read_with(cx, |lsp_store, _| {
1101 let (id, status) = lsp_store.language_server_statuses().next().unwrap();
1102 (id, LanguageServerName::from(status.name.as_str()))
1103 });
1104
1105 // Simulate jumping to a definition in a dependency outside of the worktree.
1106 let _out_of_worktree_buffer = project
1107 .update(cx, |project, cx| {
1108 project.open_local_buffer_via_lsp(
1109 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1110 server_id,
1111 server_name.clone(),
1112 cx,
1113 )
1114 })
1115 .await
1116 .unwrap();
1117
1118 // Keep track of the FS events reported to the language server.
1119 let file_changes = Arc::new(Mutex::new(Vec::new()));
1120 fake_server
1121 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1122 registrations: vec![lsp::Registration {
1123 id: Default::default(),
1124 method: "workspace/didChangeWatchedFiles".to_string(),
1125 register_options: serde_json::to_value(
1126 lsp::DidChangeWatchedFilesRegistrationOptions {
1127 watchers: vec![
1128 lsp::FileSystemWatcher {
1129 glob_pattern: lsp::GlobPattern::String(
1130 path!("/the-root/Cargo.toml").to_string(),
1131 ),
1132 kind: None,
1133 },
1134 lsp::FileSystemWatcher {
1135 glob_pattern: lsp::GlobPattern::String(
1136 path!("/the-root/src/*.{rs,c}").to_string(),
1137 ),
1138 kind: None,
1139 },
1140 lsp::FileSystemWatcher {
1141 glob_pattern: lsp::GlobPattern::String(
1142 path!("/the-root/target/y/**/*.rs").to_string(),
1143 ),
1144 kind: None,
1145 },
1146 lsp::FileSystemWatcher {
1147 glob_pattern: lsp::GlobPattern::String(
1148 path!("/the/stdlib/src/**/*.rs").to_string(),
1149 ),
1150 kind: None,
1151 },
1152 lsp::FileSystemWatcher {
1153 glob_pattern: lsp::GlobPattern::String(
1154 path!("**/Cargo.lock").to_string(),
1155 ),
1156 kind: None,
1157 },
1158 ],
1159 },
1160 )
1161 .ok(),
1162 }],
1163 })
1164 .await
1165 .into_response()
1166 .unwrap();
1167 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1168 let file_changes = file_changes.clone();
1169 move |params, _| {
1170 let mut file_changes = file_changes.lock();
1171 file_changes.extend(params.changes);
1172 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1173 }
1174 });
1175
1176 cx.executor().run_until_parked();
1177 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1178 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1179
1180 let mut new_watched_paths = fs.watched_paths();
1181 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1182 assert_eq!(
1183 &new_watched_paths,
1184 &[
1185 Path::new(path!("/the-root")),
1186 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1187 Path::new(path!("/the/stdlib/src"))
1188 ]
1189 );
1190
1191 // Now the language server has asked us to watch an ignored directory path,
1192 // so we recursively load it.
1193 project.update(cx, |project, cx| {
1194 let worktree = project.visible_worktrees(cx).next().unwrap();
1195 assert_eq!(
1196 worktree
1197 .read(cx)
1198 .snapshot()
1199 .entries(true, 0)
1200 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1201 .collect::<Vec<_>>(),
1202 &[
1203 (Path::new(""), false),
1204 (Path::new(".gitignore"), false),
1205 (Path::new("Cargo.lock"), false),
1206 (Path::new("src"), false),
1207 (Path::new("src/a.rs"), false),
1208 (Path::new("src/b.rs"), false),
1209 (Path::new("target"), true),
1210 (Path::new("target/x"), true),
1211 (Path::new("target/y"), true),
1212 (Path::new("target/y/out"), true),
1213 (Path::new("target/y/out/y.rs"), true),
1214 (Path::new("target/z"), true),
1215 ]
1216 );
1217 });
1218
1219 // Perform some file system mutations, two of which match the watched patterns,
1220 // and one of which does not.
1221 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1222 .await
1223 .unwrap();
1224 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1225 .await
1226 .unwrap();
1227 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1228 .await
1229 .unwrap();
1230 fs.create_file(
1231 path!("/the-root/target/x/out/x2.rs").as_ref(),
1232 Default::default(),
1233 )
1234 .await
1235 .unwrap();
1236 fs.create_file(
1237 path!("/the-root/target/y/out/y2.rs").as_ref(),
1238 Default::default(),
1239 )
1240 .await
1241 .unwrap();
1242 fs.save(
1243 path!("/the-root/Cargo.lock").as_ref(),
1244 &"".into(),
1245 Default::default(),
1246 )
1247 .await
1248 .unwrap();
1249 fs.save(
1250 path!("/the-stdlib/LICENSE").as_ref(),
1251 &"".into(),
1252 Default::default(),
1253 )
1254 .await
1255 .unwrap();
1256 fs.save(
1257 path!("/the/stdlib/src/string.rs").as_ref(),
1258 &"".into(),
1259 Default::default(),
1260 )
1261 .await
1262 .unwrap();
1263
1264 // The language server receives events for the FS mutations that match its watch patterns.
1265 cx.executor().run_until_parked();
1266 assert_eq!(
1267 &*file_changes.lock(),
1268 &[
1269 lsp::FileEvent {
1270 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1271 typ: lsp::FileChangeType::CHANGED,
1272 },
1273 lsp::FileEvent {
1274 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1275 typ: lsp::FileChangeType::DELETED,
1276 },
1277 lsp::FileEvent {
1278 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1279 typ: lsp::FileChangeType::CREATED,
1280 },
1281 lsp::FileEvent {
1282 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1283 typ: lsp::FileChangeType::CREATED,
1284 },
1285 lsp::FileEvent {
1286 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1287 typ: lsp::FileChangeType::CHANGED,
1288 },
1289 ]
1290 );
1291}
1292
1293#[gpui::test]
1294async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1295 init_test(cx);
1296
1297 let fs = FakeFs::new(cx.executor());
1298 fs.insert_tree(
1299 path!("/dir"),
1300 json!({
1301 "a.rs": "let a = 1;",
1302 "b.rs": "let b = 2;"
1303 }),
1304 )
1305 .await;
1306
1307 let project = Project::test(
1308 fs,
1309 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1310 cx,
1311 )
1312 .await;
1313 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1314
1315 let buffer_a = project
1316 .update(cx, |project, cx| {
1317 project.open_local_buffer(path!("/dir/a.rs"), cx)
1318 })
1319 .await
1320 .unwrap();
1321 let buffer_b = project
1322 .update(cx, |project, cx| {
1323 project.open_local_buffer(path!("/dir/b.rs"), cx)
1324 })
1325 .await
1326 .unwrap();
1327
1328 lsp_store.update(cx, |lsp_store, cx| {
1329 lsp_store
1330 .update_diagnostics(
1331 LanguageServerId(0),
1332 lsp::PublishDiagnosticsParams {
1333 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1334 version: None,
1335 diagnostics: vec![lsp::Diagnostic {
1336 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1337 severity: Some(lsp::DiagnosticSeverity::ERROR),
1338 message: "error 1".to_string(),
1339 ..Default::default()
1340 }],
1341 },
1342 None,
1343 DiagnosticSourceKind::Pushed,
1344 &[],
1345 cx,
1346 )
1347 .unwrap();
1348 lsp_store
1349 .update_diagnostics(
1350 LanguageServerId(0),
1351 lsp::PublishDiagnosticsParams {
1352 uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
1353 version: None,
1354 diagnostics: vec![lsp::Diagnostic {
1355 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1356 severity: Some(DiagnosticSeverity::WARNING),
1357 message: "error 2".to_string(),
1358 ..Default::default()
1359 }],
1360 },
1361 None,
1362 DiagnosticSourceKind::Pushed,
1363 &[],
1364 cx,
1365 )
1366 .unwrap();
1367 });
1368
1369 buffer_a.update(cx, |buffer, _| {
1370 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1371 assert_eq!(
1372 chunks
1373 .iter()
1374 .map(|(s, d)| (s.as_str(), *d))
1375 .collect::<Vec<_>>(),
1376 &[
1377 ("let ", None),
1378 ("a", Some(DiagnosticSeverity::ERROR)),
1379 (" = 1;", None),
1380 ]
1381 );
1382 });
1383 buffer_b.update(cx, |buffer, _| {
1384 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1385 assert_eq!(
1386 chunks
1387 .iter()
1388 .map(|(s, d)| (s.as_str(), *d))
1389 .collect::<Vec<_>>(),
1390 &[
1391 ("let ", None),
1392 ("b", Some(DiagnosticSeverity::WARNING)),
1393 (" = 2;", None),
1394 ]
1395 );
1396 });
1397}
1398
1399#[gpui::test]
1400async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1401 init_test(cx);
1402
1403 let fs = FakeFs::new(cx.executor());
1404 fs.insert_tree(
1405 path!("/root"),
1406 json!({
1407 "dir": {
1408 ".git": {
1409 "HEAD": "ref: refs/heads/main",
1410 },
1411 ".gitignore": "b.rs",
1412 "a.rs": "let a = 1;",
1413 "b.rs": "let b = 2;",
1414 },
1415 "other.rs": "let b = c;"
1416 }),
1417 )
1418 .await;
1419
1420 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1421 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1422 let (worktree, _) = project
1423 .update(cx, |project, cx| {
1424 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1425 })
1426 .await
1427 .unwrap();
1428 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1429
1430 let (worktree, _) = project
1431 .update(cx, |project, cx| {
1432 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1433 })
1434 .await
1435 .unwrap();
1436 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1437
1438 let server_id = LanguageServerId(0);
1439 lsp_store.update(cx, |lsp_store, cx| {
1440 lsp_store
1441 .update_diagnostics(
1442 server_id,
1443 lsp::PublishDiagnosticsParams {
1444 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1445 version: None,
1446 diagnostics: vec![lsp::Diagnostic {
1447 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1448 severity: Some(lsp::DiagnosticSeverity::ERROR),
1449 message: "unused variable 'b'".to_string(),
1450 ..Default::default()
1451 }],
1452 },
1453 None,
1454 DiagnosticSourceKind::Pushed,
1455 &[],
1456 cx,
1457 )
1458 .unwrap();
1459 lsp_store
1460 .update_diagnostics(
1461 server_id,
1462 lsp::PublishDiagnosticsParams {
1463 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1464 version: None,
1465 diagnostics: vec![lsp::Diagnostic {
1466 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1467 severity: Some(lsp::DiagnosticSeverity::ERROR),
1468 message: "unknown variable 'c'".to_string(),
1469 ..Default::default()
1470 }],
1471 },
1472 None,
1473 DiagnosticSourceKind::Pushed,
1474 &[],
1475 cx,
1476 )
1477 .unwrap();
1478 });
1479
1480 let main_ignored_buffer = project
1481 .update(cx, |project, cx| {
1482 project.open_buffer((main_worktree_id, "b.rs"), cx)
1483 })
1484 .await
1485 .unwrap();
1486 main_ignored_buffer.update(cx, |buffer, _| {
1487 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1488 assert_eq!(
1489 chunks
1490 .iter()
1491 .map(|(s, d)| (s.as_str(), *d))
1492 .collect::<Vec<_>>(),
1493 &[
1494 ("let ", None),
1495 ("b", Some(DiagnosticSeverity::ERROR)),
1496 (" = 2;", None),
1497 ],
1498 "Gigitnored buffers should still get in-buffer diagnostics",
1499 );
1500 });
1501 let other_buffer = project
1502 .update(cx, |project, cx| {
1503 project.open_buffer((other_worktree_id, ""), cx)
1504 })
1505 .await
1506 .unwrap();
1507 other_buffer.update(cx, |buffer, _| {
1508 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1509 assert_eq!(
1510 chunks
1511 .iter()
1512 .map(|(s, d)| (s.as_str(), *d))
1513 .collect::<Vec<_>>(),
1514 &[
1515 ("let b = ", None),
1516 ("c", Some(DiagnosticSeverity::ERROR)),
1517 (";", None),
1518 ],
1519 "Buffers from hidden projects should still get in-buffer diagnostics"
1520 );
1521 });
1522
1523 project.update(cx, |project, cx| {
1524 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1525 assert_eq!(
1526 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1527 vec![(
1528 ProjectPath {
1529 worktree_id: main_worktree_id,
1530 path: Arc::from(Path::new("b.rs")),
1531 },
1532 server_id,
1533 DiagnosticSummary {
1534 error_count: 1,
1535 warning_count: 0,
1536 }
1537 )]
1538 );
1539 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1540 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1541 });
1542}
1543
// Verifies that disk-based diagnostics progress (reported by the server under
// its configured progress token) is surfaced as project events in a strict
// order — server added → refresh → started → per-file update → finished — and
// that publishing identical empty diagnostics twice yields only one update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter's progress token is what marks this server's work as
    // "disk-based diagnostics" rather than ordinary progress.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token emits a
    // DiskBasedDiagnosticsStarted event (after the inlay-hint refresh).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress under the same token completes the disk-based pass.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is attached to the buffer with push semantics.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Second identical (empty) publish: no further event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1680
// Verifies that restarting a language server while its disk-based diagnostics
// pass is still in flight does not leave the project stuck in a "diagnosing"
// state: the new server (id 1) owns the progress lifecycle, and the old
// server's unfinished progress token is discarded.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The replacement server gets the next id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1767
1768#[gpui::test]
1769async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1770 init_test(cx);
1771
1772 let fs = FakeFs::new(cx.executor());
1773 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
1774
1775 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1776
1777 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1778 language_registry.add(rust_lang());
1779 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1780
1781 let (buffer, _) = project
1782 .update(cx, |project, cx| {
1783 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1784 })
1785 .await
1786 .unwrap();
1787
1788 // Publish diagnostics
1789 let fake_server = fake_servers.next().await.unwrap();
1790 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1791 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1792 version: None,
1793 diagnostics: vec![lsp::Diagnostic {
1794 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1795 severity: Some(lsp::DiagnosticSeverity::ERROR),
1796 message: "the message".to_string(),
1797 ..Default::default()
1798 }],
1799 });
1800
1801 cx.executor().run_until_parked();
1802 buffer.update(cx, |buffer, _| {
1803 assert_eq!(
1804 buffer
1805 .snapshot()
1806 .diagnostics_in_range::<_, usize>(0..1, false)
1807 .map(|entry| entry.diagnostic.message.clone())
1808 .collect::<Vec<_>>(),
1809 ["the message".to_string()]
1810 );
1811 });
1812 project.update(cx, |project, cx| {
1813 assert_eq!(
1814 project.diagnostic_summary(false, cx),
1815 DiagnosticSummary {
1816 error_count: 1,
1817 warning_count: 0,
1818 }
1819 );
1820 });
1821
1822 project.update(cx, |project, cx| {
1823 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1824 });
1825
1826 // The diagnostics are cleared.
1827 cx.executor().run_until_parked();
1828 buffer.update(cx, |buffer, _| {
1829 assert_eq!(
1830 buffer
1831 .snapshot()
1832 .diagnostics_in_range::<_, usize>(0..1, false)
1833 .map(|entry| entry.diagnostic.message.clone())
1834 .collect::<Vec<_>>(),
1835 Vec::<String>::new(),
1836 );
1837 });
1838 project.update(cx, |project, cx| {
1839 assert_eq!(
1840 project.diagnostic_summary(false, cx),
1841 DiagnosticSummary {
1842 error_count: 0,
1843 warning_count: 0,
1844 }
1845 );
1846 });
1847}
1848
1849#[gpui::test]
1850async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1851 init_test(cx);
1852
1853 let fs = FakeFs::new(cx.executor());
1854 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1855
1856 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1857 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1858
1859 language_registry.add(rust_lang());
1860 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1861
1862 let (buffer, _handle) = project
1863 .update(cx, |project, cx| {
1864 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1865 })
1866 .await
1867 .unwrap();
1868
1869 // Before restarting the server, report diagnostics with an unknown buffer version.
1870 let fake_server = fake_servers.next().await.unwrap();
1871 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1872 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1873 version: Some(10000),
1874 diagnostics: Vec::new(),
1875 });
1876 cx.executor().run_until_parked();
1877 project.update(cx, |project, cx| {
1878 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1879 });
1880
1881 let mut fake_server = fake_servers.next().await.unwrap();
1882 let notification = fake_server
1883 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1884 .await
1885 .text_document;
1886 assert_eq!(notification.version, 0);
1887}
1888
1889#[gpui::test]
1890async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1891 init_test(cx);
1892
1893 let progress_token = "the-progress-token";
1894
1895 let fs = FakeFs::new(cx.executor());
1896 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1897
1898 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1899
1900 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1901 language_registry.add(rust_lang());
1902 let mut fake_servers = language_registry.register_fake_lsp(
1903 "Rust",
1904 FakeLspAdapter {
1905 name: "the-language-server",
1906 disk_based_diagnostics_sources: vec!["disk".into()],
1907 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1908 ..Default::default()
1909 },
1910 );
1911
1912 let (buffer, _handle) = project
1913 .update(cx, |project, cx| {
1914 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1915 })
1916 .await
1917 .unwrap();
1918
1919 // Simulate diagnostics starting to update.
1920 let mut fake_server = fake_servers.next().await.unwrap();
1921 fake_server
1922 .start_progress_with(
1923 "another-token",
1924 lsp::WorkDoneProgressBegin {
1925 cancellable: Some(false),
1926 ..Default::default()
1927 },
1928 )
1929 .await;
1930 fake_server
1931 .start_progress_with(
1932 progress_token,
1933 lsp::WorkDoneProgressBegin {
1934 cancellable: Some(true),
1935 ..Default::default()
1936 },
1937 )
1938 .await;
1939 cx.executor().run_until_parked();
1940
1941 project.update(cx, |project, cx| {
1942 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1943 });
1944
1945 let cancel_notification = fake_server
1946 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1947 .await;
1948 assert_eq!(
1949 cancel_notification.token,
1950 NumberOrString::String(progress_token.into())
1951 );
1952}
1953
// Verifies that toggling `enable_language_server` in per-language settings
// stops and restarts the corresponding server — and only that server: the
// Rust and JavaScript servers are toggled independently here, and each
// change is observed through the server's own LSP notification stream.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer of each language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server receives Exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance re-opens the still-open buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JS server is shut down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2071
2072#[gpui::test(iterations = 3)]
2073async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
2074 init_test(cx);
2075
2076 let text = "
2077 fn a() { A }
2078 fn b() { BB }
2079 fn c() { CCC }
2080 "
2081 .unindent();
2082
2083 let fs = FakeFs::new(cx.executor());
2084 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2085
2086 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2087 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2088
2089 language_registry.add(rust_lang());
2090 let mut fake_servers = language_registry.register_fake_lsp(
2091 "Rust",
2092 FakeLspAdapter {
2093 disk_based_diagnostics_sources: vec!["disk".into()],
2094 ..Default::default()
2095 },
2096 );
2097
2098 let buffer = project
2099 .update(cx, |project, cx| {
2100 project.open_local_buffer(path!("/dir/a.rs"), cx)
2101 })
2102 .await
2103 .unwrap();
2104
2105 let _handle = project.update(cx, |project, cx| {
2106 project.register_buffer_with_language_servers(&buffer, cx)
2107 });
2108
2109 let mut fake_server = fake_servers.next().await.unwrap();
2110 let open_notification = fake_server
2111 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2112 .await;
2113
2114 // Edit the buffer, moving the content down
2115 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2116 let change_notification_1 = fake_server
2117 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2118 .await;
2119 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2120
2121 // Report some diagnostics for the initial version of the buffer
2122 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2123 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2124 version: Some(open_notification.text_document.version),
2125 diagnostics: vec![
2126 lsp::Diagnostic {
2127 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2128 severity: Some(DiagnosticSeverity::ERROR),
2129 message: "undefined variable 'A'".to_string(),
2130 source: Some("disk".to_string()),
2131 ..Default::default()
2132 },
2133 lsp::Diagnostic {
2134 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2135 severity: Some(DiagnosticSeverity::ERROR),
2136 message: "undefined variable 'BB'".to_string(),
2137 source: Some("disk".to_string()),
2138 ..Default::default()
2139 },
2140 lsp::Diagnostic {
2141 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2142 severity: Some(DiagnosticSeverity::ERROR),
2143 source: Some("disk".to_string()),
2144 message: "undefined variable 'CCC'".to_string(),
2145 ..Default::default()
2146 },
2147 ],
2148 });
2149
2150 // The diagnostics have moved down since they were created.
2151 cx.executor().run_until_parked();
2152 buffer.update(cx, |buffer, _| {
2153 assert_eq!(
2154 buffer
2155 .snapshot()
2156 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2157 .collect::<Vec<_>>(),
2158 &[
2159 DiagnosticEntry {
2160 range: Point::new(3, 9)..Point::new(3, 11),
2161 diagnostic: Diagnostic {
2162 source: Some("disk".into()),
2163 severity: DiagnosticSeverity::ERROR,
2164 message: "undefined variable 'BB'".to_string(),
2165 is_disk_based: true,
2166 group_id: 1,
2167 is_primary: true,
2168 source_kind: DiagnosticSourceKind::Pushed,
2169 ..Diagnostic::default()
2170 },
2171 },
2172 DiagnosticEntry {
2173 range: Point::new(4, 9)..Point::new(4, 12),
2174 diagnostic: Diagnostic {
2175 source: Some("disk".into()),
2176 severity: DiagnosticSeverity::ERROR,
2177 message: "undefined variable 'CCC'".to_string(),
2178 is_disk_based: true,
2179 group_id: 2,
2180 is_primary: true,
2181 source_kind: DiagnosticSourceKind::Pushed,
2182 ..Diagnostic::default()
2183 }
2184 }
2185 ]
2186 );
2187 assert_eq!(
2188 chunks_with_diagnostics(buffer, 0..buffer.len()),
2189 [
2190 ("\n\nfn a() { ".to_string(), None),
2191 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2192 (" }\nfn b() { ".to_string(), None),
2193 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2194 (" }\nfn c() { ".to_string(), None),
2195 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2196 (" }\n".to_string(), None),
2197 ]
2198 );
2199 assert_eq!(
2200 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2201 [
2202 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2203 (" }\nfn c() { ".to_string(), None),
2204 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2205 ]
2206 );
2207 });
2208
2209 // Ensure overlapping diagnostics are highlighted correctly.
2210 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2211 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2212 version: Some(open_notification.text_document.version),
2213 diagnostics: vec![
2214 lsp::Diagnostic {
2215 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2216 severity: Some(DiagnosticSeverity::ERROR),
2217 message: "undefined variable 'A'".to_string(),
2218 source: Some("disk".to_string()),
2219 ..Default::default()
2220 },
2221 lsp::Diagnostic {
2222 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2223 severity: Some(DiagnosticSeverity::WARNING),
2224 message: "unreachable statement".to_string(),
2225 source: Some("disk".to_string()),
2226 ..Default::default()
2227 },
2228 ],
2229 });
2230
2231 cx.executor().run_until_parked();
2232 buffer.update(cx, |buffer, _| {
2233 assert_eq!(
2234 buffer
2235 .snapshot()
2236 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2237 .collect::<Vec<_>>(),
2238 &[
2239 DiagnosticEntry {
2240 range: Point::new(2, 9)..Point::new(2, 12),
2241 diagnostic: Diagnostic {
2242 source: Some("disk".into()),
2243 severity: DiagnosticSeverity::WARNING,
2244 message: "unreachable statement".to_string(),
2245 is_disk_based: true,
2246 group_id: 4,
2247 is_primary: true,
2248 source_kind: DiagnosticSourceKind::Pushed,
2249 ..Diagnostic::default()
2250 }
2251 },
2252 DiagnosticEntry {
2253 range: Point::new(2, 9)..Point::new(2, 10),
2254 diagnostic: Diagnostic {
2255 source: Some("disk".into()),
2256 severity: DiagnosticSeverity::ERROR,
2257 message: "undefined variable 'A'".to_string(),
2258 is_disk_based: true,
2259 group_id: 3,
2260 is_primary: true,
2261 source_kind: DiagnosticSourceKind::Pushed,
2262 ..Diagnostic::default()
2263 },
2264 }
2265 ]
2266 );
2267 assert_eq!(
2268 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2269 [
2270 ("fn a() { ".to_string(), None),
2271 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2272 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2273 ("\n".to_string(), None),
2274 ]
2275 );
2276 assert_eq!(
2277 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2278 [
2279 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2280 ("\n".to_string(), None),
2281 ]
2282 );
2283 });
2284
2285 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2286 // changes since the last save.
2287 buffer.update(cx, |buffer, cx| {
2288 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2289 buffer.edit(
2290 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2291 None,
2292 cx,
2293 );
2294 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2295 });
2296 let change_notification_2 = fake_server
2297 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2298 .await;
2299 assert!(
2300 change_notification_2.text_document.version > change_notification_1.text_document.version
2301 );
2302
2303 // Handle out-of-order diagnostics
2304 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2305 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2306 version: Some(change_notification_2.text_document.version),
2307 diagnostics: vec![
2308 lsp::Diagnostic {
2309 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2310 severity: Some(DiagnosticSeverity::ERROR),
2311 message: "undefined variable 'BB'".to_string(),
2312 source: Some("disk".to_string()),
2313 ..Default::default()
2314 },
2315 lsp::Diagnostic {
2316 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2317 severity: Some(DiagnosticSeverity::WARNING),
2318 message: "undefined variable 'A'".to_string(),
2319 source: Some("disk".to_string()),
2320 ..Default::default()
2321 },
2322 ],
2323 });
2324
2325 cx.executor().run_until_parked();
2326 buffer.update(cx, |buffer, _| {
2327 assert_eq!(
2328 buffer
2329 .snapshot()
2330 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2331 .collect::<Vec<_>>(),
2332 &[
2333 DiagnosticEntry {
2334 range: Point::new(2, 21)..Point::new(2, 22),
2335 diagnostic: Diagnostic {
2336 source: Some("disk".into()),
2337 severity: DiagnosticSeverity::WARNING,
2338 message: "undefined variable 'A'".to_string(),
2339 is_disk_based: true,
2340 group_id: 6,
2341 is_primary: true,
2342 source_kind: DiagnosticSourceKind::Pushed,
2343 ..Diagnostic::default()
2344 }
2345 },
2346 DiagnosticEntry {
2347 range: Point::new(3, 9)..Point::new(3, 14),
2348 diagnostic: Diagnostic {
2349 source: Some("disk".into()),
2350 severity: DiagnosticSeverity::ERROR,
2351 message: "undefined variable 'BB'".to_string(),
2352 is_disk_based: true,
2353 group_id: 5,
2354 is_primary: true,
2355 source_kind: DiagnosticSourceKind::Pushed,
2356 ..Diagnostic::default()
2357 },
2358 }
2359 ]
2360 );
2361 });
2362}
2363
2364#[gpui::test]
2365async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2366 init_test(cx);
2367
2368 let text = concat!(
2369 "let one = ;\n", //
2370 "let two = \n",
2371 "let three = 3;\n",
2372 );
2373
2374 let fs = FakeFs::new(cx.executor());
2375 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2376
2377 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2378 let buffer = project
2379 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2380 .await
2381 .unwrap();
2382
2383 project.update(cx, |project, cx| {
2384 project.lsp_store.update(cx, |lsp_store, cx| {
2385 lsp_store
2386 .update_diagnostic_entries(
2387 LanguageServerId(0),
2388 PathBuf::from("/dir/a.rs"),
2389 None,
2390 None,
2391 vec![
2392 DiagnosticEntry {
2393 range: Unclipped(PointUtf16::new(0, 10))
2394 ..Unclipped(PointUtf16::new(0, 10)),
2395 diagnostic: Diagnostic {
2396 severity: DiagnosticSeverity::ERROR,
2397 message: "syntax error 1".to_string(),
2398 source_kind: DiagnosticSourceKind::Pushed,
2399 ..Diagnostic::default()
2400 },
2401 },
2402 DiagnosticEntry {
2403 range: Unclipped(PointUtf16::new(1, 10))
2404 ..Unclipped(PointUtf16::new(1, 10)),
2405 diagnostic: Diagnostic {
2406 severity: DiagnosticSeverity::ERROR,
2407 message: "syntax error 2".to_string(),
2408 source_kind: DiagnosticSourceKind::Pushed,
2409 ..Diagnostic::default()
2410 },
2411 },
2412 ],
2413 cx,
2414 )
2415 .unwrap();
2416 })
2417 });
2418
2419 // An empty range is extended forward to include the following character.
2420 // At the end of a line, an empty range is extended backward to include
2421 // the preceding character.
2422 buffer.update(cx, |buffer, _| {
2423 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2424 assert_eq!(
2425 chunks
2426 .iter()
2427 .map(|(s, d)| (s.as_str(), *d))
2428 .collect::<Vec<_>>(),
2429 &[
2430 ("let one = ", None),
2431 (";", Some(DiagnosticSeverity::ERROR)),
2432 ("\nlet two =", None),
2433 (" ", Some(DiagnosticSeverity::ERROR)),
2434 ("\nlet three = 3;\n", None)
2435 ]
2436 );
2437 });
2438}
2439
2440#[gpui::test]
2441async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2442 init_test(cx);
2443
2444 let fs = FakeFs::new(cx.executor());
2445 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2446 .await;
2447
2448 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2449 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2450
2451 lsp_store.update(cx, |lsp_store, cx| {
2452 lsp_store
2453 .update_diagnostic_entries(
2454 LanguageServerId(0),
2455 Path::new("/dir/a.rs").to_owned(),
2456 None,
2457 None,
2458 vec![DiagnosticEntry {
2459 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2460 diagnostic: Diagnostic {
2461 severity: DiagnosticSeverity::ERROR,
2462 is_primary: true,
2463 message: "syntax error a1".to_string(),
2464 source_kind: DiagnosticSourceKind::Pushed,
2465 ..Diagnostic::default()
2466 },
2467 }],
2468 cx,
2469 )
2470 .unwrap();
2471 lsp_store
2472 .update_diagnostic_entries(
2473 LanguageServerId(1),
2474 Path::new("/dir/a.rs").to_owned(),
2475 None,
2476 None,
2477 vec![DiagnosticEntry {
2478 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2479 diagnostic: Diagnostic {
2480 severity: DiagnosticSeverity::ERROR,
2481 is_primary: true,
2482 message: "syntax error b1".to_string(),
2483 source_kind: DiagnosticSourceKind::Pushed,
2484 ..Diagnostic::default()
2485 },
2486 }],
2487 cx,
2488 )
2489 .unwrap();
2490
2491 assert_eq!(
2492 lsp_store.diagnostic_summary(false, cx),
2493 DiagnosticSummary {
2494 error_count: 2,
2495 warning_count: 0,
2496 }
2497 );
2498 });
2499}
2500
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that edits a language server computed against an OLDER document
    // version are transformed onto the buffer's current contents: the buffer is
    // edited after the server "saw" it, and edits_from_lsp is given the stale
    // version number captured from the DidOpen notification below.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server received on open; this is the
    // version the LSP edits below will claim to be based on.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The positions in these TextEdits refer to `lsp_document_version`, i.e. the
    // buffer as it was BEFORE the three edits above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits must preserve the user's interleaved edits
    // (the comments) while landing the server's changes in the right spots.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2655
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // Verifies that edits_from_lsp reduces a large "rewrite most of the file"
    // set of LSP edits down to the minimal diff before it is applied.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four raw LSP edits above collapse into just two buffer edits:
        // the import rewrite and the removal of the now-duplicated second use.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2766
2767#[gpui::test]
2768async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
2769 cx: &mut gpui::TestAppContext,
2770) {
2771 init_test(cx);
2772
2773 let text = "Path()";
2774
2775 let fs = FakeFs::new(cx.executor());
2776 fs.insert_tree(
2777 path!("/dir"),
2778 json!({
2779 "a.rs": text
2780 }),
2781 )
2782 .await;
2783
2784 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2785 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2786 let buffer = project
2787 .update(cx, |project, cx| {
2788 project.open_local_buffer(path!("/dir/a.rs"), cx)
2789 })
2790 .await
2791 .unwrap();
2792
2793 // Simulate the language server sending us a pair of edits at the same location,
2794 // with an insertion following a replacement (which violates the LSP spec).
2795 let edits = lsp_store
2796 .update(cx, |lsp_store, cx| {
2797 lsp_store.as_local_mut().unwrap().edits_from_lsp(
2798 &buffer,
2799 [
2800 lsp::TextEdit {
2801 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
2802 new_text: "Path".into(),
2803 },
2804 lsp::TextEdit {
2805 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2806 new_text: "from path import Path\n\n\n".into(),
2807 },
2808 ],
2809 LanguageServerId(0),
2810 None,
2811 cx,
2812 )
2813 })
2814 .await
2815 .unwrap();
2816
2817 buffer.update(cx, |buffer, cx| {
2818 buffer.edit(edits, None, cx);
2819 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
2820 });
2821}
2822
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    // Verifies that edits_from_lsp tolerates malformed server edits: unsorted
    // order, an inverted range (start after end), and a range pointing past the
    // end of the document. The result must match the well-formed-diff case.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start column 8 comes after end column 4.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // After sorting/clipping, the edits collapse to the same minimal diff
        // a well-formed request would have produced.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2929
2930fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2931 buffer: &Buffer,
2932 range: Range<T>,
2933) -> Vec<(String, Option<DiagnosticSeverity>)> {
2934 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2935 for chunk in buffer.snapshot().chunks(range, true) {
2936 if chunks.last().map_or(false, |prev_chunk| {
2937 prev_chunk.1 == chunk.diagnostic_severity
2938 }) {
2939 chunks.last_mut().unwrap().0.push_str(chunk.text);
2940 } else {
2941 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2942 }
2943 }
2944 chunks
2945}
2946
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Opens a single-file project (`b.rs`), resolves a go-to-definition into a
    // file OUTSIDE the project (`a.rs`), and verifies that the target file is
    // loaded into an invisible worktree that is released once the definition
    // result is dropped.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs exists on disk but is not opened.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server answers the definition request with a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was pulled in via a new, invisible (`false`) worktree alongside
        // the original visible b.rs worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: (absolute path, is_visible) for every worktree in the project.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3044
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    // Verifies that when a CompletionItem carries a `text_edit`, its new_text
    // and range win over both `insert_text` and `label`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the completion request first; the handler below is registered
    // afterwards and awaited, so the request is answered exactly once.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        // Replace the final "fqn" (last 3 characters).
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion uses the text_edit's text and range verbatim.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3127
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies fallback behavior when items have no per-item `text_edit` but
    // the CompletionList supplies a default `edit_range` via `item_defaults`:
    // the default range is combined with `insert_text` (test 1) or, failing
    // that, with `label` (test 2).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covers the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text is used as the new text, paired with the default range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no insert_text either, the label itself becomes the new text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3263
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies completion resolution when the server provides NEITHER a
    // per-item text_edit NOR a default edit_range: the replace range must be
    // inferred from the word around the cursor, using insert_text (test 1) or
    // label (test 2) as the new text.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The inferred range covers the word "fqn" before the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote, inside the string literal.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The inferred range covers "cmp" (the word before the cursor), and the
    // label is used as the replacement text.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3369
// Verifies that line endings inside LSP completion text are normalized:
// both a lone `\r` and a `\r\n` in the server's `insert_text` become `\n`
// in the resolved completion's `new_text`.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Request completions at the end of the buffer; the request stays pending
    // until the fake server handles it below.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with insert text containing a bare `\r` and a `\r\n`;
    // `.next().await` waits for the handler to service the pending request.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both carriage-return forms are normalized to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3437
// Verifies the command-based code-action flow: when a resolved code action
// carries a command instead of edits, applying it executes the command, and
// edits that the server sends back via `workspace/applyEdit` while the command
// runs are captured in the resulting project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The fake server advertises code-action resolution and a single
    // executable command, which the test exercises end to end.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`); this triggers the
    // resolve + execute-command sequence handled below.
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3579
3580#[gpui::test(iterations = 10)]
3581async fn test_save_file(cx: &mut gpui::TestAppContext) {
3582 init_test(cx);
3583
3584 let fs = FakeFs::new(cx.executor());
3585 fs.insert_tree(
3586 path!("/dir"),
3587 json!({
3588 "file1": "the old contents",
3589 }),
3590 )
3591 .await;
3592
3593 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3594 let buffer = project
3595 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3596 .await
3597 .unwrap();
3598 buffer.update(cx, |buffer, cx| {
3599 assert_eq!(buffer.text(), "the old contents");
3600 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3601 });
3602
3603 project
3604 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3605 .await
3606 .unwrap();
3607
3608 let new_text = fs
3609 .load(Path::new(path!("/dir/file1")))
3610 .await
3611 .unwrap()
3612 .replace("\r\n", "\n");
3613 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3614}
3615
// Verifies that saving an untitled buffer under a path with a recognized
// extension (".rs") starts the matching language server and registers the
// buffer with it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer. It has no file, so registering it with
    // language servers is a no-op at this point.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as "file.rs" inside the project's worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: Arc::from("file.rs".as_ref()),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer is associated with the new server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
3695
3696#[gpui::test(iterations = 30)]
3697async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3698 init_test(cx);
3699
3700 let fs = FakeFs::new(cx.executor().clone());
3701 fs.insert_tree(
3702 path!("/dir"),
3703 json!({
3704 "file1": "the original contents",
3705 }),
3706 )
3707 .await;
3708
3709 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3710 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3711 let buffer = project
3712 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3713 .await
3714 .unwrap();
3715
3716 // Simulate buffer diffs being slow, so that they don't complete before
3717 // the next file change occurs.
3718 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3719
3720 // Change the buffer's file on disk, and then wait for the file change
3721 // to be detected by the worktree, so that the buffer starts reloading.
3722 fs.save(
3723 path!("/dir/file1").as_ref(),
3724 &"the first contents".into(),
3725 Default::default(),
3726 )
3727 .await
3728 .unwrap();
3729 worktree.next_event(cx).await;
3730
3731 // Change the buffer's file again. Depending on the random seed, the
3732 // previous file change may still be in progress.
3733 fs.save(
3734 path!("/dir/file1").as_ref(),
3735 &"the second contents".into(),
3736 Default::default(),
3737 )
3738 .await
3739 .unwrap();
3740 worktree.next_event(cx).await;
3741
3742 cx.executor().run_until_parked();
3743 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3744 buffer.read_with(cx, |buffer, _| {
3745 assert_eq!(buffer.text(), on_disk_text);
3746 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3747 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3748 });
3749}
3750
3751#[gpui::test(iterations = 30)]
3752async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3753 init_test(cx);
3754
3755 let fs = FakeFs::new(cx.executor().clone());
3756 fs.insert_tree(
3757 path!("/dir"),
3758 json!({
3759 "file1": "the original contents",
3760 }),
3761 )
3762 .await;
3763
3764 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3765 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3766 let buffer = project
3767 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3768 .await
3769 .unwrap();
3770
3771 // Simulate buffer diffs being slow, so that they don't complete before
3772 // the next file change occurs.
3773 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3774
3775 // Change the buffer's file on disk, and then wait for the file change
3776 // to be detected by the worktree, so that the buffer starts reloading.
3777 fs.save(
3778 path!("/dir/file1").as_ref(),
3779 &"the first contents".into(),
3780 Default::default(),
3781 )
3782 .await
3783 .unwrap();
3784 worktree.next_event(cx).await;
3785
3786 cx.executor()
3787 .spawn(cx.executor().simulate_random_delay())
3788 .await;
3789
3790 // Perform a noop edit, causing the buffer's version to increase.
3791 buffer.update(cx, |buffer, cx| {
3792 buffer.edit([(0..0, " ")], None, cx);
3793 buffer.undo(cx);
3794 });
3795
3796 cx.executor().run_until_parked();
3797 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3798 buffer.read_with(cx, |buffer, _| {
3799 let buffer_text = buffer.text();
3800 if buffer_text == on_disk_text {
3801 assert!(
3802 !buffer.is_dirty() && !buffer.has_conflict(),
3803 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3804 );
3805 }
3806 // If the file change occurred while the buffer was processing the first
3807 // change, the buffer will be in a conflicting state.
3808 else {
3809 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3810 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3811 }
3812 });
3813}
3814
3815#[gpui::test]
3816async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3817 init_test(cx);
3818
3819 let fs = FakeFs::new(cx.executor());
3820 fs.insert_tree(
3821 path!("/dir"),
3822 json!({
3823 "file1": "the old contents",
3824 }),
3825 )
3826 .await;
3827
3828 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3829 let buffer = project
3830 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3831 .await
3832 .unwrap();
3833 buffer.update(cx, |buffer, cx| {
3834 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3835 });
3836
3837 project
3838 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3839 .await
3840 .unwrap();
3841
3842 let new_text = fs
3843 .load(Path::new(path!("/dir/file1")))
3844 .await
3845 .unwrap()
3846 .replace("\r\n", "\n");
3847 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3848}
3849
3850#[gpui::test]
3851async fn test_save_as(cx: &mut gpui::TestAppContext) {
3852 init_test(cx);
3853
3854 let fs = FakeFs::new(cx.executor());
3855 fs.insert_tree("/dir", json!({})).await;
3856
3857 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3858
3859 let languages = project.update(cx, |project, _| project.languages().clone());
3860 languages.add(rust_lang());
3861
3862 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3863 buffer.update(cx, |buffer, cx| {
3864 buffer.edit([(0..0, "abc")], None, cx);
3865 assert!(buffer.is_dirty());
3866 assert!(!buffer.has_conflict());
3867 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3868 });
3869 project
3870 .update(cx, |project, cx| {
3871 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3872 let path = ProjectPath {
3873 worktree_id,
3874 path: Arc::from(Path::new("file1.rs")),
3875 };
3876 project.save_buffer_as(buffer.clone(), path, cx)
3877 })
3878 .await
3879 .unwrap();
3880 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3881
3882 cx.executor().run_until_parked();
3883 buffer.update(cx, |buffer, cx| {
3884 assert_eq!(
3885 buffer.file().unwrap().full_path(cx),
3886 Path::new("dir/file1.rs")
3887 );
3888 assert!(!buffer.is_dirty());
3889 assert!(!buffer.has_conflict());
3890 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3891 });
3892
3893 let opened_buffer = project
3894 .update(cx, |project, cx| {
3895 project.open_local_buffer("/dir/file1.rs", cx)
3896 })
3897 .await
3898 .unwrap();
3899 assert_eq!(opened_buffer, buffer);
3900}
3901
// Uses a real filesystem to rename/delete files and directories, then checks
// that (a) entry ids and open buffers track the moves, and (b) a remote
// replica of the worktree converges to the same paths after applying the
// streamed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the stable entry id for a worktree-relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture the update stream so it can be replayed into the replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects all of the renames and the deletion.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });

    // Entry ids are stable across renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but reports a deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });
}
4067
4068#[gpui::test(iterations = 10)]
4069async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4070 init_test(cx);
4071
4072 let fs = FakeFs::new(cx.executor());
4073 fs.insert_tree(
4074 path!("/dir"),
4075 json!({
4076 "a": {
4077 "file1": "",
4078 }
4079 }),
4080 )
4081 .await;
4082
4083 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4084 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4085 let tree_id = tree.update(cx, |tree, _| tree.id());
4086
4087 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4088 project.update(cx, |project, cx| {
4089 let tree = project.worktrees(cx).next().unwrap();
4090 tree.read(cx)
4091 .entry_for_path(path)
4092 .unwrap_or_else(|| panic!("no entry for path {}", path))
4093 .id
4094 })
4095 };
4096
4097 let dir_id = id_for_path("a", cx);
4098 let file_id = id_for_path("a/file1", cx);
4099 let buffer = project
4100 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
4101 .await
4102 .unwrap();
4103 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4104
4105 project
4106 .update(cx, |project, cx| {
4107 project.rename_entry(dir_id, Path::new("b"), cx)
4108 })
4109 .unwrap()
4110 .await
4111 .to_included()
4112 .unwrap();
4113 cx.executor().run_until_parked();
4114
4115 assert_eq!(id_for_path("b", cx), dir_id);
4116 assert_eq!(id_for_path("b/file1", cx), file_id);
4117 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4118}
4119
4120#[gpui::test]
4121async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4122 init_test(cx);
4123
4124 let fs = FakeFs::new(cx.executor());
4125 fs.insert_tree(
4126 "/dir",
4127 json!({
4128 "a.txt": "a-contents",
4129 "b.txt": "b-contents",
4130 }),
4131 )
4132 .await;
4133
4134 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4135
4136 // Spawn multiple tasks to open paths, repeating some paths.
4137 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4138 (
4139 p.open_local_buffer("/dir/a.txt", cx),
4140 p.open_local_buffer("/dir/b.txt", cx),
4141 p.open_local_buffer("/dir/a.txt", cx),
4142 )
4143 });
4144
4145 let buffer_a_1 = buffer_a_1.await.unwrap();
4146 let buffer_a_2 = buffer_a_2.await.unwrap();
4147 let buffer_b = buffer_b.await.unwrap();
4148 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4149 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4150
4151 // There is only one buffer per path.
4152 let buffer_a_id = buffer_a_1.entity_id();
4153 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4154
4155 // Open the same path again while it is still open.
4156 drop(buffer_a_1);
4157 let buffer_a_3 = project
4158 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4159 .await
4160 .unwrap();
4161
4162 // There's still only one buffer per path.
4163 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4164}
4165
4166#[gpui::test]
4167async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4168 init_test(cx);
4169
4170 let fs = FakeFs::new(cx.executor());
4171 fs.insert_tree(
4172 path!("/dir"),
4173 json!({
4174 "file1": "abc",
4175 "file2": "def",
4176 "file3": "ghi",
4177 }),
4178 )
4179 .await;
4180
4181 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4182
4183 let buffer1 = project
4184 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4185 .await
4186 .unwrap();
4187 let events = Arc::new(Mutex::new(Vec::new()));
4188
4189 // initially, the buffer isn't dirty.
4190 buffer1.update(cx, |buffer, cx| {
4191 cx.subscribe(&buffer1, {
4192 let events = events.clone();
4193 move |_, _, event, _| match event {
4194 BufferEvent::Operation { .. } => {}
4195 _ => events.lock().push(event.clone()),
4196 }
4197 })
4198 .detach();
4199
4200 assert!(!buffer.is_dirty());
4201 assert!(events.lock().is_empty());
4202
4203 buffer.edit([(1..2, "")], None, cx);
4204 });
4205
4206 // after the first edit, the buffer is dirty, and emits a dirtied event.
4207 buffer1.update(cx, |buffer, cx| {
4208 assert!(buffer.text() == "ac");
4209 assert!(buffer.is_dirty());
4210 assert_eq!(
4211 *events.lock(),
4212 &[
4213 language::BufferEvent::Edited,
4214 language::BufferEvent::DirtyChanged
4215 ]
4216 );
4217 events.lock().clear();
4218 buffer.did_save(
4219 buffer.version(),
4220 buffer.file().unwrap().disk_state().mtime(),
4221 cx,
4222 );
4223 });
4224
4225 // after saving, the buffer is not dirty, and emits a saved event.
4226 buffer1.update(cx, |buffer, cx| {
4227 assert!(!buffer.is_dirty());
4228 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4229 events.lock().clear();
4230
4231 buffer.edit([(1..1, "B")], None, cx);
4232 buffer.edit([(2..2, "D")], None, cx);
4233 });
4234
4235 // after editing again, the buffer is dirty, and emits another dirty event.
4236 buffer1.update(cx, |buffer, cx| {
4237 assert!(buffer.text() == "aBDc");
4238 assert!(buffer.is_dirty());
4239 assert_eq!(
4240 *events.lock(),
4241 &[
4242 language::BufferEvent::Edited,
4243 language::BufferEvent::DirtyChanged,
4244 language::BufferEvent::Edited,
4245 ],
4246 );
4247 events.lock().clear();
4248
4249 // After restoring the buffer to its previously-saved state,
4250 // the buffer is not considered dirty anymore.
4251 buffer.edit([(1..3, "")], None, cx);
4252 assert!(buffer.text() == "ac");
4253 assert!(!buffer.is_dirty());
4254 });
4255
4256 assert_eq!(
4257 *events.lock(),
4258 &[
4259 language::BufferEvent::Edited,
4260 language::BufferEvent::DirtyChanged
4261 ]
4262 );
4263
4264 // When a file is deleted, it is not considered dirty.
4265 let events = Arc::new(Mutex::new(Vec::new()));
4266 let buffer2 = project
4267 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4268 .await
4269 .unwrap();
4270 buffer2.update(cx, |_, cx| {
4271 cx.subscribe(&buffer2, {
4272 let events = events.clone();
4273 move |_, _, event, _| match event {
4274 BufferEvent::Operation { .. } => {}
4275 _ => events.lock().push(event.clone()),
4276 }
4277 })
4278 .detach();
4279 });
4280
4281 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4282 .await
4283 .unwrap();
4284 cx.executor().run_until_parked();
4285 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4286 assert_eq!(
4287 mem::take(&mut *events.lock()),
4288 &[language::BufferEvent::FileHandleChanged]
4289 );
4290
4291 // Buffer becomes dirty when edited.
4292 buffer2.update(cx, |buffer, cx| {
4293 buffer.edit([(2..3, "")], None, cx);
4294 assert_eq!(buffer.is_dirty(), true);
4295 });
4296 assert_eq!(
4297 mem::take(&mut *events.lock()),
4298 &[
4299 language::BufferEvent::Edited,
4300 language::BufferEvent::DirtyChanged
4301 ]
4302 );
4303
4304 // Buffer becomes clean again when all of its content is removed, because
4305 // the file was deleted.
4306 buffer2.update(cx, |buffer, cx| {
4307 buffer.edit([(0..2, "")], None, cx);
4308 assert_eq!(buffer.is_empty(), true);
4309 assert_eq!(buffer.is_dirty(), false);
4310 });
4311 assert_eq!(
4312 *events.lock(),
4313 &[
4314 language::BufferEvent::Edited,
4315 language::BufferEvent::DirtyChanged
4316 ]
4317 );
4318
4319 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4320 let events = Arc::new(Mutex::new(Vec::new()));
4321 let buffer3 = project
4322 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4323 .await
4324 .unwrap();
4325 buffer3.update(cx, |_, cx| {
4326 cx.subscribe(&buffer3, {
4327 let events = events.clone();
4328 move |_, _, event, _| match event {
4329 BufferEvent::Operation { .. } => {}
4330 _ => events.lock().push(event.clone()),
4331 }
4332 })
4333 .detach();
4334 });
4335
4336 buffer3.update(cx, |buffer, cx| {
4337 buffer.edit([(0..0, "x")], None, cx);
4338 });
4339 events.lock().clear();
4340 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4341 .await
4342 .unwrap();
4343 cx.executor().run_until_parked();
4344 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4345 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4346}
4347
// Verifies how an open buffer reacts to its file changing on disk: a clean
// buffer is reloaded in place (with anchors carried across the diff), while a
// dirty buffer keeps its contents and is flagged as conflicted.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The `ˇ` markers record offsets at which anchors will be created.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create anchors at the marked offsets in the original text.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diffed reload and now sit at the
        // corresponding marked offsets of the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4430
4431#[gpui::test]
4432async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4433 init_test(cx);
4434
4435 let fs = FakeFs::new(cx.executor());
4436 fs.insert_tree(
4437 path!("/dir"),
4438 json!({
4439 "file1": "a\nb\nc\n",
4440 "file2": "one\r\ntwo\r\nthree\r\n",
4441 }),
4442 )
4443 .await;
4444
4445 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4446 let buffer1 = project
4447 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4448 .await
4449 .unwrap();
4450 let buffer2 = project
4451 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4452 .await
4453 .unwrap();
4454
4455 buffer1.update(cx, |buffer, _| {
4456 assert_eq!(buffer.text(), "a\nb\nc\n");
4457 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4458 });
4459 buffer2.update(cx, |buffer, _| {
4460 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4461 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4462 });
4463
4464 // Change a file's line endings on disk from unix to windows. The buffer's
4465 // state updates correctly.
4466 fs.save(
4467 path!("/dir/file1").as_ref(),
4468 &"aaa\nb\nc\n".into(),
4469 LineEnding::Windows,
4470 )
4471 .await
4472 .unwrap();
4473 cx.executor().run_until_parked();
4474 buffer1.update(cx, |buffer, _| {
4475 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4476 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4477 });
4478
4479 // Save a file with windows line endings. The file is written correctly.
4480 buffer2.update(cx, |buffer, cx| {
4481 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4482 });
4483 project
4484 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4485 .await
4486 .unwrap();
4487 assert_eq!(
4488 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4489 "one\r\ntwo\r\nthree\r\nfour\r\n",
4490 );
4491}
4492
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies diagnostic grouping: diagnostics whose `related_information`
    // entries point at each other end up sharing a `group_id`, with the main
    // diagnostic of each group marked `is_primary`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Simulated `textDocument/publishDiagnostics` payload with two logical
    // groups:
    // - "error 1" (warning) plus one hint that points back at it.
    // - "error 2" (error) plus two hints; the hints reference the primary
    //   ("original diagnostic") through their own `related_information`.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Group "error 1": primary warning at (1,8)..(1,9).
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint belonging to "error 1" (same range, points back at the
            // primary via the "original diagnostic" related info).
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Group "error 2": primary error at (2,8)..(2,17) with two related
            // hints at (1,13)..(1,15).
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint belonging to "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint belonging to "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Push the diagnostics into the LSP store as if they came from server 0.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All five entries are returned in buffer order; "error 2" and its hints
    // share group 0, "error 1" and its hint share group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 can be fetched on its own: both hints plus the primary error.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 is the warning plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
4752
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a project entry drives the LSP file-operation
    // protocol in order: a `workspace/willRenameFiles` request (whose returned
    // workspace edit is recorded here) followed by a `workspace/didRenameFiles`
    // notification, for paths matching the server's registered filters.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server declares interest in rename operations on `.rs` files
    // and on any folder.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the server request handler is installed below and
    // must observe exactly this old/new path pair.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // Workspace edit the server returns from `willRenameFiles`; it targets the
    // other file (two.rs) with a stale version to exercise edit application.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit actually handed back by the willRename handler so we
    // can assert on it after the rename completes.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename resolves, the server must receive a didRenameFiles
    // notification for the same path pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4881
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Verifies symbol rename against a fake language server:
    // `prepare_rename` resolves the renameable range, then `perform_rename`
    // applies a multi-file workspace edit and returns the affected buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Kick off prepare_rename at offset 7 (inside "ONE"); the handler below
    // then serves the outstanding request.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The server's (0,6)..(0,9) range maps to buffer offsets 6..9 ("ONE").
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server responds with edits to both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its undo transaction; both
    // files must contain the renamed symbol.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5021
5022#[gpui::test]
5023async fn test_search(cx: &mut gpui::TestAppContext) {
5024 init_test(cx);
5025
5026 let fs = FakeFs::new(cx.executor());
5027 fs.insert_tree(
5028 path!("/dir"),
5029 json!({
5030 "one.rs": "const ONE: usize = 1;",
5031 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5032 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5033 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5034 }),
5035 )
5036 .await;
5037 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5038 assert_eq!(
5039 search(
5040 &project,
5041 SearchQuery::text(
5042 "TWO",
5043 false,
5044 true,
5045 false,
5046 Default::default(),
5047 Default::default(),
5048 false,
5049 None
5050 )
5051 .unwrap(),
5052 cx
5053 )
5054 .await
5055 .unwrap(),
5056 HashMap::from_iter([
5057 (path!("dir/two.rs").to_string(), vec![6..9]),
5058 (path!("dir/three.rs").to_string(), vec![37..40])
5059 ])
5060 );
5061
5062 let buffer_4 = project
5063 .update(cx, |project, cx| {
5064 project.open_local_buffer(path!("/dir/four.rs"), cx)
5065 })
5066 .await
5067 .unwrap();
5068 buffer_4.update(cx, |buffer, cx| {
5069 let text = "two::TWO";
5070 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5071 });
5072
5073 assert_eq!(
5074 search(
5075 &project,
5076 SearchQuery::text(
5077 "TWO",
5078 false,
5079 true,
5080 false,
5081 Default::default(),
5082 Default::default(),
5083 false,
5084 None,
5085 )
5086 .unwrap(),
5087 cx
5088 )
5089 .await
5090 .unwrap(),
5091 HashMap::from_iter([
5092 (path!("dir/two.rs").to_string(), vec![6..9]),
5093 (path!("dir/three.rs").to_string(), vec![37..40]),
5094 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5095 ])
5096 );
5097}
5098
5099#[gpui::test]
5100async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
5101 init_test(cx);
5102
5103 let search_query = "file";
5104
5105 let fs = FakeFs::new(cx.executor());
5106 fs.insert_tree(
5107 path!("/dir"),
5108 json!({
5109 "one.rs": r#"// Rust file one"#,
5110 "one.ts": r#"// TypeScript file one"#,
5111 "two.rs": r#"// Rust file two"#,
5112 "two.ts": r#"// TypeScript file two"#,
5113 }),
5114 )
5115 .await;
5116 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5117
5118 assert!(
5119 search(
5120 &project,
5121 SearchQuery::text(
5122 search_query,
5123 false,
5124 true,
5125 false,
5126 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5127 Default::default(),
5128 false,
5129 None
5130 )
5131 .unwrap(),
5132 cx
5133 )
5134 .await
5135 .unwrap()
5136 .is_empty(),
5137 "If no inclusions match, no files should be returned"
5138 );
5139
5140 assert_eq!(
5141 search(
5142 &project,
5143 SearchQuery::text(
5144 search_query,
5145 false,
5146 true,
5147 false,
5148 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5149 Default::default(),
5150 false,
5151 None
5152 )
5153 .unwrap(),
5154 cx
5155 )
5156 .await
5157 .unwrap(),
5158 HashMap::from_iter([
5159 (path!("dir/one.rs").to_string(), vec![8..12]),
5160 (path!("dir/two.rs").to_string(), vec![8..12]),
5161 ]),
5162 "Rust only search should give only Rust files"
5163 );
5164
5165 assert_eq!(
5166 search(
5167 &project,
5168 SearchQuery::text(
5169 search_query,
5170 false,
5171 true,
5172 false,
5173 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5174 Default::default(),
5175 false,
5176 None,
5177 )
5178 .unwrap(),
5179 cx
5180 )
5181 .await
5182 .unwrap(),
5183 HashMap::from_iter([
5184 (path!("dir/one.ts").to_string(), vec![14..18]),
5185 (path!("dir/two.ts").to_string(), vec![14..18]),
5186 ]),
5187 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
5188 );
5189
5190 assert_eq!(
5191 search(
5192 &project,
5193 SearchQuery::text(
5194 search_query,
5195 false,
5196 true,
5197 false,
5198 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5199 .unwrap(),
5200 Default::default(),
5201 false,
5202 None,
5203 )
5204 .unwrap(),
5205 cx
5206 )
5207 .await
5208 .unwrap(),
5209 HashMap::from_iter([
5210 (path!("dir/two.ts").to_string(), vec![14..18]),
5211 (path!("dir/one.rs").to_string(), vec![8..12]),
5212 (path!("dir/one.ts").to_string(), vec![14..18]),
5213 (path!("dir/two.rs").to_string(), vec![8..12]),
5214 ]),
5215 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
5216 );
5217}
5218
5219#[gpui::test]
5220async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5221 init_test(cx);
5222
5223 let search_query = "file";
5224
5225 let fs = FakeFs::new(cx.executor());
5226 fs.insert_tree(
5227 path!("/dir"),
5228 json!({
5229 "one.rs": r#"// Rust file one"#,
5230 "one.ts": r#"// TypeScript file one"#,
5231 "two.rs": r#"// Rust file two"#,
5232 "two.ts": r#"// TypeScript file two"#,
5233 }),
5234 )
5235 .await;
5236 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5237
5238 assert_eq!(
5239 search(
5240 &project,
5241 SearchQuery::text(
5242 search_query,
5243 false,
5244 true,
5245 false,
5246 Default::default(),
5247 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5248 false,
5249 None,
5250 )
5251 .unwrap(),
5252 cx
5253 )
5254 .await
5255 .unwrap(),
5256 HashMap::from_iter([
5257 (path!("dir/one.rs").to_string(), vec![8..12]),
5258 (path!("dir/one.ts").to_string(), vec![14..18]),
5259 (path!("dir/two.rs").to_string(), vec![8..12]),
5260 (path!("dir/two.ts").to_string(), vec![14..18]),
5261 ]),
5262 "If no exclusions match, all files should be returned"
5263 );
5264
5265 assert_eq!(
5266 search(
5267 &project,
5268 SearchQuery::text(
5269 search_query,
5270 false,
5271 true,
5272 false,
5273 Default::default(),
5274 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5275 false,
5276 None,
5277 )
5278 .unwrap(),
5279 cx
5280 )
5281 .await
5282 .unwrap(),
5283 HashMap::from_iter([
5284 (path!("dir/one.ts").to_string(), vec![14..18]),
5285 (path!("dir/two.ts").to_string(), vec![14..18]),
5286 ]),
5287 "Rust exclusion search should give only TypeScript files"
5288 );
5289
5290 assert_eq!(
5291 search(
5292 &project,
5293 SearchQuery::text(
5294 search_query,
5295 false,
5296 true,
5297 false,
5298 Default::default(),
5299 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5300 false,
5301 None,
5302 )
5303 .unwrap(),
5304 cx
5305 )
5306 .await
5307 .unwrap(),
5308 HashMap::from_iter([
5309 (path!("dir/one.rs").to_string(), vec![8..12]),
5310 (path!("dir/two.rs").to_string(), vec![8..12]),
5311 ]),
5312 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5313 );
5314
5315 assert!(
5316 search(
5317 &project,
5318 SearchQuery::text(
5319 search_query,
5320 false,
5321 true,
5322 false,
5323 Default::default(),
5324 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5325 .unwrap(),
5326 false,
5327 None,
5328 )
5329 .unwrap(),
5330 cx
5331 )
5332 .await
5333 .unwrap()
5334 .is_empty(),
5335 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5336 );
5337}
5338
5339#[gpui::test]
5340async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5341 init_test(cx);
5342
5343 let search_query = "file";
5344
5345 let fs = FakeFs::new(cx.executor());
5346 fs.insert_tree(
5347 path!("/dir"),
5348 json!({
5349 "one.rs": r#"// Rust file one"#,
5350 "one.ts": r#"// TypeScript file one"#,
5351 "two.rs": r#"// Rust file two"#,
5352 "two.ts": r#"// TypeScript file two"#,
5353 }),
5354 )
5355 .await;
5356
5357 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5358 let _buffer = project.update(cx, |project, cx| {
5359 let buffer = project.create_local_buffer("file", None, cx);
5360 project.mark_buffer_as_non_searchable(buffer.read(cx).remote_id(), cx);
5361 buffer
5362 });
5363
5364 assert_eq!(
5365 search(
5366 &project,
5367 SearchQuery::text(
5368 search_query,
5369 false,
5370 true,
5371 false,
5372 Default::default(),
5373 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5374 false,
5375 None,
5376 )
5377 .unwrap(),
5378 cx
5379 )
5380 .await
5381 .unwrap(),
5382 HashMap::from_iter([
5383 (path!("dir/one.rs").to_string(), vec![8..12]),
5384 (path!("dir/one.ts").to_string(), vec![14..18]),
5385 (path!("dir/two.rs").to_string(), vec![8..12]),
5386 (path!("dir/two.ts").to_string(), vec![14..18]),
5387 ]),
5388 "If no exclusions match, all files should be returned"
5389 );
5390
5391 assert_eq!(
5392 search(
5393 &project,
5394 SearchQuery::text(
5395 search_query,
5396 false,
5397 true,
5398 false,
5399 Default::default(),
5400 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5401 false,
5402 None,
5403 )
5404 .unwrap(),
5405 cx
5406 )
5407 .await
5408 .unwrap(),
5409 HashMap::from_iter([
5410 (path!("dir/one.ts").to_string(), vec![14..18]),
5411 (path!("dir/two.ts").to_string(), vec![14..18]),
5412 ]),
5413 "Rust exclusion search should give only TypeScript files"
5414 );
5415
5416 assert_eq!(
5417 search(
5418 &project,
5419 SearchQuery::text(
5420 search_query,
5421 false,
5422 true,
5423 false,
5424 Default::default(),
5425 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5426 false,
5427 None,
5428 )
5429 .unwrap(),
5430 cx
5431 )
5432 .await
5433 .unwrap(),
5434 HashMap::from_iter([
5435 (path!("dir/one.rs").to_string(), vec![8..12]),
5436 (path!("dir/two.rs").to_string(), vec![8..12]),
5437 ]),
5438 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5439 );
5440
5441 assert!(
5442 search(
5443 &project,
5444 SearchQuery::text(
5445 search_query,
5446 false,
5447 true,
5448 false,
5449 Default::default(),
5450 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5451 .unwrap(),
5452 false,
5453 None,
5454 )
5455 .unwrap(),
5456 cx
5457 )
5458 .await
5459 .unwrap()
5460 .is_empty(),
5461 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5462 );
5463}
5464
5465#[gpui::test]
5466async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5467 init_test(cx);
5468
5469 let search_query = "file";
5470
5471 let fs = FakeFs::new(cx.executor());
5472 fs.insert_tree(
5473 path!("/dir"),
5474 json!({
5475 "one.rs": r#"// Rust file one"#,
5476 "one.ts": r#"// TypeScript file one"#,
5477 "two.rs": r#"// Rust file two"#,
5478 "two.ts": r#"// TypeScript file two"#,
5479 }),
5480 )
5481 .await;
5482 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5483
5484 assert!(
5485 search(
5486 &project,
5487 SearchQuery::text(
5488 search_query,
5489 false,
5490 true,
5491 false,
5492 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5493 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5494 false,
5495 None,
5496 )
5497 .unwrap(),
5498 cx
5499 )
5500 .await
5501 .unwrap()
5502 .is_empty(),
5503 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5504 );
5505
5506 assert!(
5507 search(
5508 &project,
5509 SearchQuery::text(
5510 search_query,
5511 false,
5512 true,
5513 false,
5514 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5515 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5516 false,
5517 None,
5518 )
5519 .unwrap(),
5520 cx
5521 )
5522 .await
5523 .unwrap()
5524 .is_empty(),
5525 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5526 );
5527
5528 assert!(
5529 search(
5530 &project,
5531 SearchQuery::text(
5532 search_query,
5533 false,
5534 true,
5535 false,
5536 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5537 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5538 false,
5539 None,
5540 )
5541 .unwrap(),
5542 cx
5543 )
5544 .await
5545 .unwrap()
5546 .is_empty(),
5547 "Non-matching inclusions and exclusions should not change that."
5548 );
5549
5550 assert_eq!(
5551 search(
5552 &project,
5553 SearchQuery::text(
5554 search_query,
5555 false,
5556 true,
5557 false,
5558 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5559 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5560 false,
5561 None,
5562 )
5563 .unwrap(),
5564 cx
5565 )
5566 .await
5567 .unwrap(),
5568 HashMap::from_iter([
5569 (path!("dir/one.ts").to_string(), vec![14..18]),
5570 (path!("dir/two.ts").to_string(), vec![14..18]),
5571 ]),
5572 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5573 );
5574}
5575
5576#[gpui::test]
5577async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
5578 init_test(cx);
5579
5580 let fs = FakeFs::new(cx.executor());
5581 fs.insert_tree(
5582 path!("/worktree-a"),
5583 json!({
5584 "haystack.rs": r#"// NEEDLE"#,
5585 "haystack.ts": r#"// NEEDLE"#,
5586 }),
5587 )
5588 .await;
5589 fs.insert_tree(
5590 path!("/worktree-b"),
5591 json!({
5592 "haystack.rs": r#"// NEEDLE"#,
5593 "haystack.ts": r#"// NEEDLE"#,
5594 }),
5595 )
5596 .await;
5597
5598 let project = Project::test(
5599 fs.clone(),
5600 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
5601 cx,
5602 )
5603 .await;
5604
5605 assert_eq!(
5606 search(
5607 &project,
5608 SearchQuery::text(
5609 "NEEDLE",
5610 false,
5611 true,
5612 false,
5613 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
5614 Default::default(),
5615 true,
5616 None,
5617 )
5618 .unwrap(),
5619 cx
5620 )
5621 .await
5622 .unwrap(),
5623 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
5624 "should only return results from included worktree"
5625 );
5626 assert_eq!(
5627 search(
5628 &project,
5629 SearchQuery::text(
5630 "NEEDLE",
5631 false,
5632 true,
5633 false,
5634 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
5635 Default::default(),
5636 true,
5637 None,
5638 )
5639 .unwrap(),
5640 cx
5641 )
5642 .await
5643 .unwrap(),
5644 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
5645 "should only return results from included worktree"
5646 );
5647
5648 assert_eq!(
5649 search(
5650 &project,
5651 SearchQuery::text(
5652 "NEEDLE",
5653 false,
5654 true,
5655 false,
5656 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5657 Default::default(),
5658 false,
5659 None,
5660 )
5661 .unwrap(),
5662 cx
5663 )
5664 .await
5665 .unwrap(),
5666 HashMap::from_iter([
5667 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
5668 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
5669 ]),
5670 "should return results from both worktrees"
5671 );
5672}
5673
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A repo whose `.gitignore` hides `target/` (at any depth) and the root
    // `node_modules/`. Every hidden file also contains the query string, so
    // the ignored/unignored distinction is the only thing being tested.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: gitignored entries are skipped entirely.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Same query with the fourth positional flag flipped to `true`
    // (presumably `include_ignored` — confirm against `SearchQuery::text`):
    // now files under `target/` and `node_modules/` are searched too.
    // A fresh project is created so previous scan state doesn't leak in.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include/exclude path matchers still apply within ignored directories:
    // only non-TS files under `node_modules/prettier/` should match.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5796
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Files containing Cyrillic text in different cases. All match ranges
    // below are byte offsets: "привет" occupies 12 bytes in UTF-8, so e.g.
    // "// привет" matches at 3..15.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // A case-sensitive non-regex query stays a plain `Text` query and only
    // matches the exact lowercase occurrences.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A case-insensitive query over non-ASCII text is promoted to a `Regex`
    // query internally (asserted below) and matches both cases.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Punctuation in the query ('.') must be treated literally even though the
    // implementation may use a regex under the hood: only "ПРИВЕТ." matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
5880
5881#[gpui::test]
5882async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5883 init_test(cx);
5884
5885 let fs = FakeFs::new(cx.executor().clone());
5886 fs.insert_tree(
5887 "/one/two",
5888 json!({
5889 "three": {
5890 "a.txt": "",
5891 "four": {}
5892 },
5893 "c.rs": ""
5894 }),
5895 )
5896 .await;
5897
5898 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5899 project
5900 .update(cx, |project, cx| {
5901 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5902 project.create_entry((id, "b.."), true, cx)
5903 })
5904 .await
5905 .unwrap()
5906 .to_included()
5907 .unwrap();
5908
5909 // Can't create paths outside the project
5910 let result = project
5911 .update(cx, |project, cx| {
5912 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5913 project.create_entry((id, "../../boop"), true, cx)
5914 })
5915 .await;
5916 assert!(result.is_err());
5917
5918 // Can't create paths with '..'
5919 let result = project
5920 .update(cx, |project, cx| {
5921 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5922 project.create_entry((id, "four/../beep"), true, cx)
5923 })
5924 .await;
5925 assert!(result.is_err());
5926
5927 assert_eq!(
5928 fs.paths(true),
5929 vec![
5930 PathBuf::from(path!("/")),
5931 PathBuf::from(path!("/one")),
5932 PathBuf::from(path!("/one/two")),
5933 PathBuf::from(path!("/one/two/c.rs")),
5934 PathBuf::from(path!("/one/two/three")),
5935 PathBuf::from(path!("/one/two/three/a.txt")),
5936 PathBuf::from(path!("/one/two/three/b..")),
5937 PathBuf::from(path!("/one/two/three/four")),
5938 ]
5939 );
5940
5941 // And we cannot open buffers with '..'
5942 let result = project
5943 .update(cx, |project, cx| {
5944 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5945 project.open_buffer((id, "../c.rs"), cx)
5946 })
5947 .await;
5948 assert!(result.is_err())
5949}
5950
5951#[gpui::test]
5952async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
5953 init_test(cx);
5954
5955 let fs = FakeFs::new(cx.executor());
5956 fs.insert_tree(
5957 path!("/dir"),
5958 json!({
5959 "a.tsx": "a",
5960 }),
5961 )
5962 .await;
5963
5964 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5965
5966 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5967 language_registry.add(tsx_lang());
5968 let language_server_names = [
5969 "TypeScriptServer",
5970 "TailwindServer",
5971 "ESLintServer",
5972 "NoHoverCapabilitiesServer",
5973 ];
5974 let mut language_servers = [
5975 language_registry.register_fake_lsp(
5976 "tsx",
5977 FakeLspAdapter {
5978 name: language_server_names[0],
5979 capabilities: lsp::ServerCapabilities {
5980 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5981 ..lsp::ServerCapabilities::default()
5982 },
5983 ..FakeLspAdapter::default()
5984 },
5985 ),
5986 language_registry.register_fake_lsp(
5987 "tsx",
5988 FakeLspAdapter {
5989 name: language_server_names[1],
5990 capabilities: lsp::ServerCapabilities {
5991 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5992 ..lsp::ServerCapabilities::default()
5993 },
5994 ..FakeLspAdapter::default()
5995 },
5996 ),
5997 language_registry.register_fake_lsp(
5998 "tsx",
5999 FakeLspAdapter {
6000 name: language_server_names[2],
6001 capabilities: lsp::ServerCapabilities {
6002 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6003 ..lsp::ServerCapabilities::default()
6004 },
6005 ..FakeLspAdapter::default()
6006 },
6007 ),
6008 language_registry.register_fake_lsp(
6009 "tsx",
6010 FakeLspAdapter {
6011 name: language_server_names[3],
6012 capabilities: lsp::ServerCapabilities {
6013 hover_provider: None,
6014 ..lsp::ServerCapabilities::default()
6015 },
6016 ..FakeLspAdapter::default()
6017 },
6018 ),
6019 ];
6020
6021 let (buffer, _handle) = project
6022 .update(cx, |p, cx| {
6023 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6024 })
6025 .await
6026 .unwrap();
6027 cx.executor().run_until_parked();
6028
6029 let mut servers_with_hover_requests = HashMap::default();
6030 for i in 0..language_server_names.len() {
6031 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
6032 panic!(
6033 "Failed to get language server #{i} with name {}",
6034 &language_server_names[i]
6035 )
6036 });
6037 let new_server_name = new_server.server.name();
6038 assert!(
6039 !servers_with_hover_requests.contains_key(&new_server_name),
6040 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6041 );
6042 match new_server_name.as_ref() {
6043 "TailwindServer" | "TypeScriptServer" => {
6044 servers_with_hover_requests.insert(
6045 new_server_name.clone(),
6046 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6047 move |_, _| {
6048 let name = new_server_name.clone();
6049 async move {
6050 Ok(Some(lsp::Hover {
6051 contents: lsp::HoverContents::Scalar(
6052 lsp::MarkedString::String(format!("{name} hover")),
6053 ),
6054 range: None,
6055 }))
6056 }
6057 },
6058 ),
6059 );
6060 }
6061 "ESLintServer" => {
6062 servers_with_hover_requests.insert(
6063 new_server_name,
6064 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6065 |_, _| async move { Ok(None) },
6066 ),
6067 );
6068 }
6069 "NoHoverCapabilitiesServer" => {
6070 let _never_handled = new_server
6071 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
6072 panic!(
6073 "Should not call for hovers server with no corresponding capabilities"
6074 )
6075 });
6076 }
6077 unexpected => panic!("Unexpected server name: {unexpected}"),
6078 }
6079 }
6080
6081 let hover_task = project.update(cx, |project, cx| {
6082 project.hover(&buffer, Point::new(0, 0), cx)
6083 });
6084 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
6085 |mut hover_request| async move {
6086 hover_request
6087 .next()
6088 .await
6089 .expect("All hover requests should have been triggered")
6090 },
6091 ))
6092 .await;
6093 assert_eq!(
6094 vec!["TailwindServer hover", "TypeScriptServer hover"],
6095 hover_task
6096 .await
6097 .into_iter()
6098 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6099 .sorted()
6100 .collect::<Vec<_>>(),
6101 "Should receive hover responses from all related servers with hover capabilities"
6102 );
6103}
6104
6105#[gpui::test]
6106async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
6107 init_test(cx);
6108
6109 let fs = FakeFs::new(cx.executor());
6110 fs.insert_tree(
6111 path!("/dir"),
6112 json!({
6113 "a.ts": "a",
6114 }),
6115 )
6116 .await;
6117
6118 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6119
6120 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6121 language_registry.add(typescript_lang());
6122 let mut fake_language_servers = language_registry.register_fake_lsp(
6123 "TypeScript",
6124 FakeLspAdapter {
6125 capabilities: lsp::ServerCapabilities {
6126 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6127 ..lsp::ServerCapabilities::default()
6128 },
6129 ..FakeLspAdapter::default()
6130 },
6131 );
6132
6133 let (buffer, _handle) = project
6134 .update(cx, |p, cx| {
6135 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6136 })
6137 .await
6138 .unwrap();
6139 cx.executor().run_until_parked();
6140
6141 let fake_server = fake_language_servers
6142 .next()
6143 .await
6144 .expect("failed to get the language server");
6145
6146 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6147 move |_, _| async move {
6148 Ok(Some(lsp::Hover {
6149 contents: lsp::HoverContents::Array(vec![
6150 lsp::MarkedString::String("".to_string()),
6151 lsp::MarkedString::String(" ".to_string()),
6152 lsp::MarkedString::String("\n\n\n".to_string()),
6153 ]),
6154 range: None,
6155 }))
6156 },
6157 );
6158
6159 let hover_task = project.update(cx, |project, cx| {
6160 project.hover(&buffer, Point::new(0, 0), cx)
6161 });
6162 let () = request_handled
6163 .next()
6164 .await
6165 .expect("All hover requests should have been triggered");
6166 assert_eq!(
6167 Vec::<String>::new(),
6168 hover_task
6169 .await
6170 .into_iter()
6171 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6172 .sorted()
6173 .collect::<Vec<_>>(),
6174 "Empty hover parts should be ignored"
6175 );
6176}
6177
6178#[gpui::test]
6179async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
6180 init_test(cx);
6181
6182 let fs = FakeFs::new(cx.executor());
6183 fs.insert_tree(
6184 path!("/dir"),
6185 json!({
6186 "a.ts": "a",
6187 }),
6188 )
6189 .await;
6190
6191 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6192
6193 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6194 language_registry.add(typescript_lang());
6195 let mut fake_language_servers = language_registry.register_fake_lsp(
6196 "TypeScript",
6197 FakeLspAdapter {
6198 capabilities: lsp::ServerCapabilities {
6199 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6200 ..lsp::ServerCapabilities::default()
6201 },
6202 ..FakeLspAdapter::default()
6203 },
6204 );
6205
6206 let (buffer, _handle) = project
6207 .update(cx, |p, cx| {
6208 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6209 })
6210 .await
6211 .unwrap();
6212 cx.executor().run_until_parked();
6213
6214 let fake_server = fake_language_servers
6215 .next()
6216 .await
6217 .expect("failed to get the language server");
6218
6219 let mut request_handled = fake_server
6220 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
6221 Ok(Some(vec![
6222 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6223 title: "organize imports".to_string(),
6224 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
6225 ..lsp::CodeAction::default()
6226 }),
6227 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6228 title: "fix code".to_string(),
6229 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
6230 ..lsp::CodeAction::default()
6231 }),
6232 ]))
6233 });
6234
6235 let code_actions_task = project.update(cx, |project, cx| {
6236 project.code_actions(
6237 &buffer,
6238 0..buffer.read(cx).len(),
6239 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
6240 cx,
6241 )
6242 });
6243
6244 let () = request_handled
6245 .next()
6246 .await
6247 .expect("The code action request should have been triggered");
6248
6249 let code_actions = code_actions_task.await.unwrap();
6250 assert_eq!(code_actions.len(), 1);
6251 assert_eq!(
6252 code_actions[0].lsp_action.action_kind(),
6253 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
6254 );
6255}
6256
6257#[gpui::test]
6258async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6259 init_test(cx);
6260
6261 let fs = FakeFs::new(cx.executor());
6262 fs.insert_tree(
6263 path!("/dir"),
6264 json!({
6265 "a.tsx": "a",
6266 }),
6267 )
6268 .await;
6269
6270 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6271
6272 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6273 language_registry.add(tsx_lang());
6274 let language_server_names = [
6275 "TypeScriptServer",
6276 "TailwindServer",
6277 "ESLintServer",
6278 "NoActionsCapabilitiesServer",
6279 ];
6280
6281 let mut language_server_rxs = [
6282 language_registry.register_fake_lsp(
6283 "tsx",
6284 FakeLspAdapter {
6285 name: language_server_names[0],
6286 capabilities: lsp::ServerCapabilities {
6287 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6288 ..lsp::ServerCapabilities::default()
6289 },
6290 ..FakeLspAdapter::default()
6291 },
6292 ),
6293 language_registry.register_fake_lsp(
6294 "tsx",
6295 FakeLspAdapter {
6296 name: language_server_names[1],
6297 capabilities: lsp::ServerCapabilities {
6298 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6299 ..lsp::ServerCapabilities::default()
6300 },
6301 ..FakeLspAdapter::default()
6302 },
6303 ),
6304 language_registry.register_fake_lsp(
6305 "tsx",
6306 FakeLspAdapter {
6307 name: language_server_names[2],
6308 capabilities: lsp::ServerCapabilities {
6309 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6310 ..lsp::ServerCapabilities::default()
6311 },
6312 ..FakeLspAdapter::default()
6313 },
6314 ),
6315 language_registry.register_fake_lsp(
6316 "tsx",
6317 FakeLspAdapter {
6318 name: language_server_names[3],
6319 capabilities: lsp::ServerCapabilities {
6320 code_action_provider: None,
6321 ..lsp::ServerCapabilities::default()
6322 },
6323 ..FakeLspAdapter::default()
6324 },
6325 ),
6326 ];
6327
6328 let (buffer, _handle) = project
6329 .update(cx, |p, cx| {
6330 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6331 })
6332 .await
6333 .unwrap();
6334 cx.executor().run_until_parked();
6335
6336 let mut servers_with_actions_requests = HashMap::default();
6337 for i in 0..language_server_names.len() {
6338 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6339 panic!(
6340 "Failed to get language server #{i} with name {}",
6341 &language_server_names[i]
6342 )
6343 });
6344 let new_server_name = new_server.server.name();
6345
6346 assert!(
6347 !servers_with_actions_requests.contains_key(&new_server_name),
6348 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6349 );
6350 match new_server_name.0.as_ref() {
6351 "TailwindServer" | "TypeScriptServer" => {
6352 servers_with_actions_requests.insert(
6353 new_server_name.clone(),
6354 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6355 move |_, _| {
6356 let name = new_server_name.clone();
6357 async move {
6358 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6359 lsp::CodeAction {
6360 title: format!("{name} code action"),
6361 ..lsp::CodeAction::default()
6362 },
6363 )]))
6364 }
6365 },
6366 ),
6367 );
6368 }
6369 "ESLintServer" => {
6370 servers_with_actions_requests.insert(
6371 new_server_name,
6372 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6373 |_, _| async move { Ok(None) },
6374 ),
6375 );
6376 }
6377 "NoActionsCapabilitiesServer" => {
6378 let _never_handled = new_server
6379 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6380 panic!(
6381 "Should not call for code actions server with no corresponding capabilities"
6382 )
6383 });
6384 }
6385 unexpected => panic!("Unexpected server name: {unexpected}"),
6386 }
6387 }
6388
6389 let code_actions_task = project.update(cx, |project, cx| {
6390 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6391 });
6392
6393 // cx.run_until_parked();
6394 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6395 |mut code_actions_request| async move {
6396 code_actions_request
6397 .next()
6398 .await
6399 .expect("All code actions requests should have been triggered")
6400 },
6401 ))
6402 .await;
6403 assert_eq!(
6404 vec!["TailwindServer code action", "TypeScriptServer code action"],
6405 code_actions_task
6406 .await
6407 .unwrap()
6408 .into_iter()
6409 .map(|code_action| code_action.lsp_action.title().to_owned())
6410 .sorted()
6411 .collect::<Vec<_>>(),
6412 "Should receive code actions responses from all related servers with hover capabilities"
6413 );
6414}
6415
6416#[gpui::test]
6417async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6418 init_test(cx);
6419
6420 let fs = FakeFs::new(cx.executor());
6421 fs.insert_tree(
6422 "/dir",
6423 json!({
6424 "a.rs": "let a = 1;",
6425 "b.rs": "let b = 2;",
6426 "c.rs": "let c = 2;",
6427 }),
6428 )
6429 .await;
6430
6431 let project = Project::test(
6432 fs,
6433 [
6434 "/dir/a.rs".as_ref(),
6435 "/dir/b.rs".as_ref(),
6436 "/dir/c.rs".as_ref(),
6437 ],
6438 cx,
6439 )
6440 .await;
6441
6442 // check the initial state and get the worktrees
6443 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6444 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6445 assert_eq!(worktrees.len(), 3);
6446
6447 let worktree_a = worktrees[0].read(cx);
6448 let worktree_b = worktrees[1].read(cx);
6449 let worktree_c = worktrees[2].read(cx);
6450
6451 // check they start in the right order
6452 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6453 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6454 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6455
6456 (
6457 worktrees[0].clone(),
6458 worktrees[1].clone(),
6459 worktrees[2].clone(),
6460 )
6461 });
6462
6463 // move first worktree to after the second
6464 // [a, b, c] -> [b, a, c]
6465 project
6466 .update(cx, |project, cx| {
6467 let first = worktree_a.read(cx);
6468 let second = worktree_b.read(cx);
6469 project.move_worktree(first.id(), second.id(), cx)
6470 })
6471 .expect("moving first after second");
6472
6473 // check the state after moving
6474 project.update(cx, |project, cx| {
6475 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6476 assert_eq!(worktrees.len(), 3);
6477
6478 let first = worktrees[0].read(cx);
6479 let second = worktrees[1].read(cx);
6480 let third = worktrees[2].read(cx);
6481
6482 // check they are now in the right order
6483 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6484 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6485 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6486 });
6487
6488 // move the second worktree to before the first
6489 // [b, a, c] -> [a, b, c]
6490 project
6491 .update(cx, |project, cx| {
6492 let second = worktree_a.read(cx);
6493 let first = worktree_b.read(cx);
6494 project.move_worktree(first.id(), second.id(), cx)
6495 })
6496 .expect("moving second before first");
6497
6498 // check the state after moving
6499 project.update(cx, |project, cx| {
6500 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6501 assert_eq!(worktrees.len(), 3);
6502
6503 let first = worktrees[0].read(cx);
6504 let second = worktrees[1].read(cx);
6505 let third = worktrees[2].read(cx);
6506
6507 // check they are now in the right order
6508 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6509 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6510 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6511 });
6512
6513 // move the second worktree to after the third
6514 // [a, b, c] -> [a, c, b]
6515 project
6516 .update(cx, |project, cx| {
6517 let second = worktree_b.read(cx);
6518 let third = worktree_c.read(cx);
6519 project.move_worktree(second.id(), third.id(), cx)
6520 })
6521 .expect("moving second after third");
6522
6523 // check the state after moving
6524 project.update(cx, |project, cx| {
6525 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6526 assert_eq!(worktrees.len(), 3);
6527
6528 let first = worktrees[0].read(cx);
6529 let second = worktrees[1].read(cx);
6530 let third = worktrees[2].read(cx);
6531
6532 // check they are now in the right order
6533 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6534 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6535 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6536 });
6537
6538 // move the third worktree to before the second
6539 // [a, c, b] -> [a, b, c]
6540 project
6541 .update(cx, |project, cx| {
6542 let third = worktree_c.read(cx);
6543 let second = worktree_b.read(cx);
6544 project.move_worktree(third.id(), second.id(), cx)
6545 })
6546 .expect("moving third before second");
6547
6548 // check the state after moving
6549 project.update(cx, |project, cx| {
6550 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6551 assert_eq!(worktrees.len(), 3);
6552
6553 let first = worktrees[0].read(cx);
6554 let second = worktrees[1].read(cx);
6555 let third = worktrees[2].read(cx);
6556
6557 // check they are now in the right order
6558 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6559 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6560 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6561 });
6562
6563 // move the first worktree to after the third
6564 // [a, b, c] -> [b, c, a]
6565 project
6566 .update(cx, |project, cx| {
6567 let first = worktree_a.read(cx);
6568 let third = worktree_c.read(cx);
6569 project.move_worktree(first.id(), third.id(), cx)
6570 })
6571 .expect("moving first after third");
6572
6573 // check the state after moving
6574 project.update(cx, |project, cx| {
6575 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6576 assert_eq!(worktrees.len(), 3);
6577
6578 let first = worktrees[0].read(cx);
6579 let second = worktrees[1].read(cx);
6580 let third = worktrees[2].read(cx);
6581
6582 // check they are now in the right order
6583 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6584 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6585 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6586 });
6587
6588 // move the third worktree to before the first
6589 // [b, c, a] -> [a, b, c]
6590 project
6591 .update(cx, |project, cx| {
6592 let third = worktree_a.read(cx);
6593 let first = worktree_b.read(cx);
6594 project.move_worktree(third.id(), first.id(), cx)
6595 })
6596 .expect("moving third before first");
6597
6598 // check the state after moving
6599 project.update(cx, |project, cx| {
6600 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6601 assert_eq!(worktrees.len(), 3);
6602
6603 let first = worktrees[0].read(cx);
6604 let second = worktrees[1].read(cx);
6605 let third = worktrees[2].read(cx);
6606
6607 // check they are now in the right order
6608 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6609 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6610 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6611 });
6612}
6613
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index ("staged") version of the file.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: one added line, one modified line.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the diff computation settle, then check buffer-vs-index hunks:
    // the comment line is an addition, the println line a modification.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Update the index so it now includes the comment but not the println
    // line: the diff should recompute against the new base text.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6711
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: HEAD, index, and working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index both also track `deletion.rs`, which is absent from the
    // working tree — i.e. an unstaged deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (HEAD content) should pick up the Rust language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                // The comment line exists only in the working copy, so it has a
                // secondary (index) hunk; the println change is already staged.
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a deletion hunk; it still has a secondary hunk
    // because the deletion is not yet reflected in the index.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once staged, the secondary hunk disappears.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6891
// Covers the full staging lifecycle for individual hunks of an uncommitted
// diff: the optimistic "pending" secondary statuses shown while an index
// write is in flight, the events emitted by the diff, confirmation once the
// write lands, and rollback when the index write fails.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index start out identical, so every difference between the
    // buffer and HEAD is an unstaged hunk.
    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Capture the diff's event stream so the exact sequence of events emitted
    // by the staging operations below can be asserted on.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The staged hunk transitions to a pending state synchronously, before
        // the index write has completed.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The hunk is still optimistically marked as pending, since the index
        // write failure hasn't been observed yet.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7231
// Verifies that staging further hunks while the FS events for earlier index
// writes are still buffered (via `FakeFs::pause_events`) doesn't lose or
// revert any staging operation once all events are eventually delivered.
// The pinned seeds reproduce schedules that previously exposed this race.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index start out identical, so all three hunks are unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7425
// Randomized test: repeatedly stage/unstage random hunks with random delays
// between operations, then check that every hunk's final secondary status
// matches what the sequence of operations implies. Setting OPERATIONS in the
// environment overrides the default operation count.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.gen_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every 5th line is modified in the buffer, producing 6 hunks over 30 lines.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    // `hunks` doubles as our model of the expected state: after each operation
    // we record the pending status we expect the real diff to show.
    for _i in 0..operations {
        let hunk_ix = rng.gen_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        for _ in 0..rng.gen_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything has settled, each pending status should have resolved to
    // its corresponding terminal state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7544
7545#[gpui::test]
7546async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7547 init_test(cx);
7548
7549 let committed_contents = r#"
7550 fn main() {
7551 println!("hello from HEAD");
7552 }
7553 "#
7554 .unindent();
7555 let file_contents = r#"
7556 fn main() {
7557 println!("hello from the working copy");
7558 }
7559 "#
7560 .unindent();
7561
7562 let fs = FakeFs::new(cx.background_executor.clone());
7563 fs.insert_tree(
7564 "/dir",
7565 json!({
7566 ".git": {},
7567 "src": {
7568 "main.rs": file_contents,
7569 }
7570 }),
7571 )
7572 .await;
7573
7574 fs.set_head_for_repo(
7575 Path::new("/dir/.git"),
7576 &[("src/main.rs".into(), committed_contents.clone())],
7577 "deadbeef",
7578 );
7579 fs.set_index_for_repo(
7580 Path::new("/dir/.git"),
7581 &[("src/main.rs".into(), committed_contents.clone())],
7582 );
7583
7584 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7585
7586 let buffer = project
7587 .update(cx, |project, cx| {
7588 project.open_local_buffer("/dir/src/main.rs", cx)
7589 })
7590 .await
7591 .unwrap();
7592 let uncommitted_diff = project
7593 .update(cx, |project, cx| {
7594 project.open_uncommitted_diff(buffer.clone(), cx)
7595 })
7596 .await
7597 .unwrap();
7598
7599 cx.run_until_parked();
7600 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7601 let snapshot = buffer.read(cx).snapshot();
7602 assert_hunks(
7603 uncommitted_diff.hunks(&snapshot, cx),
7604 &snapshot,
7605 &uncommitted_diff.base_text_string().unwrap(),
7606 &[(
7607 1..2,
7608 " println!(\"hello from HEAD\");\n",
7609 " println!(\"hello from the working copy\");\n",
7610 DiffHunkStatus {
7611 kind: DiffHunkStatusKind::Modified,
7612 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7613 },
7614 )],
7615 );
7616 });
7617}
7618
7619#[gpui::test]
7620async fn test_repository_and_path_for_project_path(
7621 background_executor: BackgroundExecutor,
7622 cx: &mut gpui::TestAppContext,
7623) {
7624 init_test(cx);
7625 let fs = FakeFs::new(background_executor);
7626 fs.insert_tree(
7627 path!("/root"),
7628 json!({
7629 "c.txt": "",
7630 "dir1": {
7631 ".git": {},
7632 "deps": {
7633 "dep1": {
7634 ".git": {},
7635 "src": {
7636 "a.txt": ""
7637 }
7638 }
7639 },
7640 "src": {
7641 "b.txt": ""
7642 }
7643 },
7644 }),
7645 )
7646 .await;
7647
7648 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7649 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7650 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7651 project
7652 .update(cx, |project, cx| project.git_scans_complete(cx))
7653 .await;
7654 cx.run_until_parked();
7655
7656 project.read_with(cx, |project, cx| {
7657 let git_store = project.git_store().read(cx);
7658 let pairs = [
7659 ("c.txt", None),
7660 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
7661 (
7662 "dir1/deps/dep1/src/a.txt",
7663 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
7664 ),
7665 ];
7666 let expected = pairs
7667 .iter()
7668 .map(|(path, result)| {
7669 (
7670 path,
7671 result.map(|(repo, repo_path)| {
7672 (Path::new(repo).into(), RepoPath::from(repo_path))
7673 }),
7674 )
7675 })
7676 .collect::<Vec<_>>();
7677 let actual = pairs
7678 .iter()
7679 .map(|(path, _)| {
7680 let project_path = (tree_id, Path::new(path)).into();
7681 let result = maybe!({
7682 let (repo, repo_path) =
7683 git_store.repository_and_path_for_project_path(&project_path, cx)?;
7684 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
7685 });
7686 (path, result)
7687 })
7688 .collect::<Vec<_>>();
7689 pretty_assertions::assert_eq!(expected, actual);
7690 });
7691
7692 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
7693 .await
7694 .unwrap();
7695 cx.run_until_parked();
7696
7697 project.read_with(cx, |project, cx| {
7698 let git_store = project.git_store().read(cx);
7699 assert_eq!(
7700 git_store.repository_and_path_for_project_path(
7701 &(tree_id, Path::new("dir1/src/b.txt")).into(),
7702 cx
7703 ),
7704 None
7705 );
7706 });
7707}
7708
7709#[gpui::test]
7710async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7711 init_test(cx);
7712 let fs = FakeFs::new(cx.background_executor.clone());
7713 fs.insert_tree(
7714 path!("/root"),
7715 json!({
7716 "home": {
7717 ".git": {},
7718 "project": {
7719 "a.txt": "A"
7720 },
7721 },
7722 }),
7723 )
7724 .await;
7725 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7726
7727 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7728 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7729 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7730
7731 project
7732 .update(cx, |project, cx| project.git_scans_complete(cx))
7733 .await;
7734 tree.flush_fs_events(cx).await;
7735
7736 project.read_with(cx, |project, cx| {
7737 let containing = project
7738 .git_store()
7739 .read(cx)
7740 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7741 assert!(containing.is_none());
7742 });
7743
7744 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7745 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7746 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7747 project
7748 .update(cx, |project, cx| project.git_scans_complete(cx))
7749 .await;
7750 tree.flush_fs_events(cx).await;
7751
7752 project.read_with(cx, |project, cx| {
7753 let containing = project
7754 .git_store()
7755 .read(cx)
7756 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7757 assert_eq!(
7758 containing
7759 .unwrap()
7760 .0
7761 .read(cx)
7762 .work_directory_abs_path
7763 .as_ref(),
7764 Path::new(path!("/root/home"))
7765 );
7766 });
7767}
7768
// Exercises status reporting against a real git repository on disk (requires
// parking): the initial scan, picking up a new modification, and status after
// committing and deleting tracked/untracked files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-clean tracked file; its status should appear.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit all outstanding changes, clearing every status entry.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7898
7899#[gpui::test]
7900async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
7901 init_test(cx);
7902 cx.executor().allow_parking();
7903
7904 let root = TempTree::new(json!({
7905 "project": {
7906 "sub": {},
7907 "a.txt": "",
7908 },
7909 }));
7910
7911 let work_dir = root.path().join("project");
7912 let repo = git_init(work_dir.as_path());
7913 // a.txt exists in HEAD and the working copy but is deleted in the index.
7914 git_add("a.txt", &repo);
7915 git_commit("Initial commit", &repo);
7916 git_remove_index("a.txt".as_ref(), &repo);
7917 // `sub` is a nested git repository.
7918 let _sub = git_init(&work_dir.join("sub"));
7919
7920 let project = Project::test(
7921 Arc::new(RealFs::new(None, cx.executor())),
7922 [root.path()],
7923 cx,
7924 )
7925 .await;
7926
7927 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7928 tree.flush_fs_events(cx).await;
7929 project
7930 .update(cx, |project, cx| project.git_scans_complete(cx))
7931 .await;
7932 cx.executor().run_until_parked();
7933
7934 let repository = project.read_with(cx, |project, cx| {
7935 project
7936 .repositories(cx)
7937 .values()
7938 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
7939 .unwrap()
7940 .clone()
7941 });
7942
7943 repository.read_with(cx, |repository, _cx| {
7944 let entries = repository.cached_status().collect::<Vec<_>>();
7945
7946 // `sub` doesn't appear in our computed statuses.
7947 // a.txt appears with a combined `DA` status.
7948 assert_eq!(
7949 entries,
7950 [StatusEntry {
7951 repo_path: "a.txt".into(),
7952 status: TrackedStatus {
7953 index_status: StatusCode::Deleted,
7954 worktree_status: StatusCode::Added
7955 }
7956 .into(),
7957 }]
7958 )
7959 });
7960}
7961
7962#[gpui::test]
7963async fn test_repository_subfolder_git_status(
7964 executor: gpui::BackgroundExecutor,
7965 cx: &mut gpui::TestAppContext,
7966) {
7967 init_test(cx);
7968
7969 let fs = FakeFs::new(executor);
7970 fs.insert_tree(
7971 path!("/root"),
7972 json!({
7973 "my-repo": {
7974 ".git": {},
7975 "a.txt": "a",
7976 "sub-folder-1": {
7977 "sub-folder-2": {
7978 "c.txt": "cc",
7979 "d": {
7980 "e.txt": "eee"
7981 }
7982 },
7983 }
7984 },
7985 }),
7986 )
7987 .await;
7988
7989 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
7990 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
7991
7992 fs.set_status_for_repo(
7993 path!("/root/my-repo/.git").as_ref(),
7994 &[(E_TXT.as_ref(), FileStatus::Untracked)],
7995 );
7996
7997 let project = Project::test(
7998 fs.clone(),
7999 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
8000 cx,
8001 )
8002 .await;
8003
8004 project
8005 .update(cx, |project, cx| project.git_scans_complete(cx))
8006 .await;
8007 cx.run_until_parked();
8008
8009 let repository = project.read_with(cx, |project, cx| {
8010 project.repositories(cx).values().next().unwrap().clone()
8011 });
8012
8013 // Ensure that the git status is loaded correctly
8014 repository.read_with(cx, |repository, _cx| {
8015 assert_eq!(
8016 repository.work_directory_abs_path,
8017 Path::new(path!("/root/my-repo")).into()
8018 );
8019
8020 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
8021 assert_eq!(
8022 repository.status_for_path(&E_TXT.into()).unwrap().status,
8023 FileStatus::Untracked
8024 );
8025 });
8026
8027 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
8028 project
8029 .update(cx, |project, cx| project.git_scans_complete(cx))
8030 .await;
8031 cx.run_until_parked();
8032
8033 repository.read_with(cx, |repository, _cx| {
8034 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
8035 assert_eq!(repository.status_for_path(&E_TXT.into()), None);
8036 });
8037}
8038
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Compiled out via `cfg(any())` until the flakiness is resolved. It drives a
// real git repository through a conflicting cherry-pick and checks that the
// conflicted path is reported in `merge_conflicts`, then cleared once the
// cherry-pick is resolved.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a divergent commit on another branch, then cherry-pick it onto
    // main so that a.txt conflicts.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8121
// Verifies that edits to .gitignore are picked up live: a file that becomes
// ignored loses its git status, and a file that stops being ignored can be
// staged and shows as added.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // The ignore flip is reflected: a.xml is now ignored, b.txt shows as added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8189
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
//
// Verifies that renaming a repository's working directory updates
// `work_directory_abs_path` while preserving the per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // "a" is committed and then modified; "b" is never tracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: work dir at project1, "a" modified, "b" untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // After the rename, the work directory path follows, and statuses are kept.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8270
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of the Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
//
// End-to-end check of per-file git status tracking against a real git repo:
// initial scan, working-copy edits, commits, resets, ignore-rule changes,
// deletions, and directory renames.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout the assertions below.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        // Committed files no longer carry a status.
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules to also cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A new file in a new nested directory should show up as untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    // Renaming the parent directory should carry the untracked status over to
    // the file's new path.
    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8472
// Verifies that adding a non-visible (single-file) worktree does not add any
// new entries to the project's repository list.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Only dep1 is opened as a visible worktree, so only its repo is reported
    // (not the outer /root/dir1 repo).
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Open a single file from dir1, which creates an invisible worktree.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list is unchanged: invisible worktrees contribute none.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8534
// Verifies ignore handling during rescans: files matched by an ancestor
// .gitignore (outside the repo) are not marked ignored within the worktree,
// files under an ignored directory are, and newly created files pick up the
// right combination of git status and ignore flag.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file_scan_exclusions so that the ignored paths are still scanned.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        // Matched only by the ancestor .gitignore outside the repository, so
        // not flagged as ignored here.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files in each category and stage the tracked one.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8670
// Verifies that linked git worktrees (`.git` file with a `gitdir:` pointer
// into `<repo>/.git/worktrees/...`) and submodules (`gitdir:` pointer into
// `<repo>/.git/modules/...`) are each detected as their own repository, and
// that git events inside them refresh their status.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories (main, linked worktree, submodule) are found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer maps to the linked worktree's repo, not the outer one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // head/index say "b" but the file on disk says "B" => modified in worktree.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
8820
8821#[gpui::test]
8822async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
8823 init_test(cx);
8824 let fs = FakeFs::new(cx.background_executor.clone());
8825 fs.insert_tree(
8826 path!("/root"),
8827 json!({
8828 "project": {
8829 ".git": {},
8830 "child1": {
8831 "a.txt": "A",
8832 },
8833 "child2": {
8834 "b.txt": "B",
8835 }
8836 }
8837 }),
8838 )
8839 .await;
8840
8841 let project = Project::test(
8842 fs.clone(),
8843 [
8844 path!("/root/project/child1").as_ref(),
8845 path!("/root/project/child2").as_ref(),
8846 ],
8847 cx,
8848 )
8849 .await;
8850
8851 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8852 tree.flush_fs_events(cx).await;
8853 project
8854 .update(cx, |project, cx| project.git_scans_complete(cx))
8855 .await;
8856 cx.executor().run_until_parked();
8857
8858 let repos = project.read_with(cx, |project, cx| {
8859 project
8860 .repositories(cx)
8861 .values()
8862 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
8863 .collect::<Vec<_>>()
8864 });
8865 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
8866}
8867
8868async fn search(
8869 project: &Entity<Project>,
8870 query: SearchQuery,
8871 cx: &mut gpui::TestAppContext,
8872) -> Result<HashMap<String, Vec<Range<usize>>>> {
8873 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8874 let mut results = HashMap::default();
8875 while let Ok(search_result) = search_rx.recv().await {
8876 match search_result {
8877 SearchResult::Buffer { buffer, ranges } => {
8878 results.entry(buffer).or_insert(ranges);
8879 }
8880 SearchResult::LimitReached => {}
8881 }
8882 }
8883 Ok(results
8884 .into_iter()
8885 .map(|(buffer, ranges)| {
8886 buffer.update(cx, |buffer, cx| {
8887 let path = buffer
8888 .file()
8889 .unwrap()
8890 .full_path(cx)
8891 .to_string_lossy()
8892 .to_string();
8893 let ranges = ranges
8894 .into_iter()
8895 .map(|range| range.to_offset(buffer))
8896 .collect::<Vec<_>>();
8897 (path, ranges)
8898 })
8899 })
8900 .collect())
8901}
8902
/// Shared setup for every test in this file: initializes logging, installs a
/// test settings store, and registers release-channel, language, and project
/// settings. Must run before creating any `Project`.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        // The settings store must be installed before the init calls below,
        // which register settings into it.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
8914
8915fn json_lang() -> Arc<Language> {
8916 Arc::new(Language::new(
8917 LanguageConfig {
8918 name: "JSON".into(),
8919 matcher: LanguageMatcher {
8920 path_suffixes: vec!["json".to_string()],
8921 ..Default::default()
8922 },
8923 ..Default::default()
8924 },
8925 None,
8926 ))
8927}
8928
8929fn js_lang() -> Arc<Language> {
8930 Arc::new(Language::new(
8931 LanguageConfig {
8932 name: "JavaScript".into(),
8933 matcher: LanguageMatcher {
8934 path_suffixes: vec!["js".to_string()],
8935 ..Default::default()
8936 },
8937 ..Default::default()
8938 },
8939 None,
8940 ))
8941}
8942
8943fn rust_lang() -> Arc<Language> {
8944 Arc::new(Language::new(
8945 LanguageConfig {
8946 name: "Rust".into(),
8947 matcher: LanguageMatcher {
8948 path_suffixes: vec!["rs".to_string()],
8949 ..Default::default()
8950 },
8951 ..Default::default()
8952 },
8953 Some(tree_sitter_rust::LANGUAGE.into()),
8954 ))
8955}
8956
8957fn typescript_lang() -> Arc<Language> {
8958 Arc::new(Language::new(
8959 LanguageConfig {
8960 name: "TypeScript".into(),
8961 matcher: LanguageMatcher {
8962 path_suffixes: vec!["ts".to_string()],
8963 ..Default::default()
8964 },
8965 ..Default::default()
8966 },
8967 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8968 ))
8969}
8970
8971fn tsx_lang() -> Arc<Language> {
8972 Arc::new(Language::new(
8973 LanguageConfig {
8974 name: "tsx".into(),
8975 matcher: LanguageMatcher {
8976 path_suffixes: vec!["tsx".to_string()],
8977 ..Default::default()
8978 },
8979 ..Default::default()
8980 },
8981 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8982 ))
8983}
8984
8985fn get_all_tasks(
8986 project: &Entity<Project>,
8987 task_contexts: Arc<TaskContexts>,
8988 cx: &mut App,
8989) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
8990 let new_tasks = project.update(cx, |project, cx| {
8991 project.task_store.update(cx, |task_store, cx| {
8992 task_store.task_inventory().unwrap().update(cx, |this, cx| {
8993 this.used_and_current_resolved_tasks(task_contexts, cx)
8994 })
8995 })
8996 });
8997
8998 cx.background_spawn(async move {
8999 let (mut old, new) = new_tasks.await;
9000 old.extend(new);
9001 old
9002 })
9003}
9004
/// Asserts that `path` has the expected git state as seen from both sides:
/// the worktree's `is_ignored` flag and the repository's status.
///
/// `Some(index_status)` means "staged with that code and an unmodified
/// working tree"; `None` means the path has no status at all.
#[track_caller]
fn assert_entry_git_state(
    tree: &Worktree,
    repository: &Repository,
    path: &str,
    index_status: Option<StatusCode>,
    is_ignored: bool,
) {
    // These assertions only make sense when the worktree root is the
    // repository's working directory.
    assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
    let entry = tree
        .entry_for_path(path)
        .unwrap_or_else(|| panic!("entry {path} not found"));
    let status = repository
        .status_for_path(&path.into())
        .map(|entry| entry.status);
    let expected = index_status.map(|index_status| {
        TrackedStatus {
            index_status,
            worktree_status: StatusCode::Unmodified,
        }
        .into()
    });
    assert_eq!(
        status, expected,
        "expected {path} to have git status: {expected:?}"
    );
    assert_eq!(
        entry.is_ignored, is_ignored,
        "expected {path} to have is_ignored: {is_ignored}"
    );
}
9036
9037#[track_caller]
9038fn git_init(path: &Path) -> git2::Repository {
9039 let mut init_opts = RepositoryInitOptions::new();
9040 init_opts.initial_head("main");
9041 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9042}
9043
9044#[track_caller]
9045fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9046 let path = path.as_ref();
9047 let mut index = repo.index().expect("Failed to get index");
9048 index.add_path(path).expect("Failed to add file");
9049 index.write().expect("Failed to write index");
9050}
9051
9052#[track_caller]
9053fn git_remove_index(path: &Path, repo: &git2::Repository) {
9054 let mut index = repo.index().expect("Failed to get index");
9055 index.remove_path(path).expect("Failed to add file");
9056 index.write().expect("Failed to write index");
9057}
9058
/// Commits the current index to HEAD with a fixed test signature, handling
/// both the initial (parentless) commit and subsequent commits.
#[track_caller]
fn git_commit(msg: &'static str, repo: &git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    // Write the staged index out as a tree to commit.
    let oid = repo.index().unwrap().write_tree().unwrap();
    let tree = repo.find_tree(oid).unwrap();
    if let Ok(head) = repo.head() {
        // HEAD exists: commit with the current HEAD commit as the parent.
        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();

        let parent_commit = parent_obj.as_commit().unwrap();

        repo.commit(
            Some("HEAD"),
            &signature,
            &signature,
            msg,
            &tree,
            &[parent_commit],
        )
        .expect("Failed to commit with parent");
    } else {
        // No HEAD yet (fresh repository): create the root commit.
        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
            .expect("Failed to commit");
    }
}
9085
/// Cherry-picks `commit` onto the current HEAD. Compiled out via the
/// always-false `cfg`, together with the disabled `test_conflicted_cherry_pick`
/// test that uses it.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9091
/// Stashes the current working-tree changes with a fixed test signature.
/// Takes `&mut` because `git2::Repository::stash_save` requires it.
#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    repo.stash_save(&signature, "N/A", None)
        .expect("Failed to stash");
}
9100
9101#[track_caller]
9102fn git_reset(offset: usize, repo: &git2::Repository) {
9103 let head = repo.head().expect("Couldn't get repo head");
9104 let object = head.peel(git2::ObjectType::Commit).unwrap();
9105 let commit = object.as_commit().unwrap();
9106 let new_head = commit
9107 .parents()
9108 .inspect(|parnet| {
9109 parnet.message();
9110 })
9111 .nth(offset)
9112 .expect("Not enough history");
9113 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9114 .expect("Could not reset");
9115}
9116
/// Creates branch `name` pointing at the current HEAD commit, without
/// checking it out. Compiled out via the always-false `cfg`, together with
/// the disabled `test_conflicted_cherry_pick` test that uses it.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed: the expect message previously said "Failed to commit",
    // copy-pasted from `git_commit`.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9127
/// Points HEAD at the ref `name` (e.g. "refs/heads/main") and checks out its
/// tree. Compiled out via the always-false `cfg`, together with the disabled
/// `test_conflicted_cherry_pick` test that uses it.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9134
/// Returns a map from repo-relative path to `git2::Status` for every entry
/// the repository reports. Compiled out via the always-false `cfg`, together
/// with the disabled `test_conflicted_cherry_pick` test that uses it.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    let mut by_path = collections::HashMap::default();
    for entry in statuses.iter() {
        by_path.insert(entry.path().unwrap().to_string(), entry.status());
    }
    by_path
}
9144
// Verifies `Project::find_project_path` resolution for absolute paths across
// multiple worktrees, including non-existent files inside a worktree and
// paths outside every worktree.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root path and id for the assertions.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at a worktree root resolves to that worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        // Nested files keep their worktree-relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        // Files in the second worktree resolve to the second worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // A path inside a worktree resolves even if no file exists there yet.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}