1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
9 DiffHunkStatusKind, assert_hunks,
10};
11use fs::FakeFs;
12use futures::{StreamExt, future};
13use git::{
14 GitHostingProviderRegistry,
15 repository::RepoPath,
16 status::{StatusCode, TrackedStatus},
17};
18use git2::RepositoryInitOptions;
19use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
20use http_client::Url;
21use itertools::Itertools;
22use language::{
23 Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, DiskState, FakeLspAdapter,
24 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
25 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
26 tree_sitter_rust, tree_sitter_typescript,
27};
28use lsp::{
29 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
30 WillRenameFiles, notification::DidRenameFiles,
31};
32use parking_lot::Mutex;
33use paths::{config_dir, tasks_file};
34use postage::stream::Stream as _;
35use pretty_assertions::{assert_eq, assert_matches};
36use rand::{Rng as _, rngs::StdRng};
37use serde_json::json;
38#[cfg(not(windows))]
39use std::os;
40use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
41use task::{ResolvedTask, TaskContext};
42use unindent::Unindent as _;
43use util::{
44 TryFutureExt as _, assert_set_eq, maybe, path,
45 paths::PathMatcher,
46 test::{TempTree, marked_text_offsets},
47 uri,
48};
49use worktree::WorktreeModelHandle as _;
50
51#[gpui::test]
52async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
53 cx.executor().allow_parking();
54
55 let (tx, mut rx) = futures::channel::mpsc::unbounded();
56 let _thread = std::thread::spawn(move || {
57 #[cfg(not(target_os = "windows"))]
58 std::fs::metadata("/tmp").unwrap();
59 #[cfg(target_os = "windows")]
60 std::fs::metadata("C:/Windows").unwrap();
61 std::thread::sleep(Duration::from_millis(1000));
62 tx.unbounded_send(1).unwrap();
63 });
64 rx.next().await.unwrap();
65}
66
67#[gpui::test]
68async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
69 cx.executor().allow_parking();
70
71 let io_task = smol::unblock(move || {
72 println!("sleeping on thread {:?}", std::thread::current().id());
73 std::thread::sleep(Duration::from_millis(10));
74 1
75 });
76
77 let task = cx.foreground_executor().spawn(async move {
78 io_task.await;
79 });
80
81 task.await;
82}
83
84#[cfg(not(windows))]
85#[gpui::test]
86async fn test_symlinks(cx: &mut gpui::TestAppContext) {
87 init_test(cx);
88 cx.executor().allow_parking();
89
90 let dir = TempTree::new(json!({
91 "root": {
92 "apple": "",
93 "banana": {
94 "carrot": {
95 "date": "",
96 "endive": "",
97 }
98 },
99 "fennel": {
100 "grape": "",
101 }
102 }
103 }));
104
105 let root_link_path = dir.path().join("root_link");
106 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
107 os::unix::fs::symlink(
108 dir.path().join("root/fennel"),
109 dir.path().join("root/finnochio"),
110 )
111 .unwrap();
112
113 let project = Project::test(
114 Arc::new(RealFs::new(None, cx.executor())),
115 [root_link_path.as_ref()],
116 cx,
117 )
118 .await;
119
120 project.update(cx, |project, cx| {
121 let tree = project.worktrees(cx).next().unwrap().read(cx);
122 assert_eq!(tree.file_count(), 5);
123 assert_eq!(
124 tree.inode_for_path("fennel/grape"),
125 tree.inode_for_path("finnochio/grape")
126 );
127 });
128}
129
130#[gpui::test]
131async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
132 init_test(cx);
133
134 let dir = TempTree::new(json!({
135 ".editorconfig": r#"
136 root = true
137 [*.rs]
138 indent_style = tab
139 indent_size = 3
140 end_of_line = lf
141 insert_final_newline = true
142 trim_trailing_whitespace = true
143 [*.js]
144 tab_width = 10
145 "#,
146 ".zed": {
147 "settings.json": r#"{
148 "tab_size": 8,
149 "hard_tabs": false,
150 "ensure_final_newline_on_save": false,
151 "remove_trailing_whitespace_on_save": false,
152 "soft_wrap": "editor_width"
153 }"#,
154 },
155 "a.rs": "fn a() {\n A\n}",
156 "b": {
157 ".editorconfig": r#"
158 [*.rs]
159 indent_size = 2
160 "#,
161 "b.rs": "fn b() {\n B\n}",
162 },
163 "c.js": "def c\n C\nend",
164 "README.json": "tabs are better\n",
165 }));
166
167 let path = dir.path();
168 let fs = FakeFs::new(cx.executor());
169 fs.insert_tree_from_real_fs(path, path).await;
170 let project = Project::test(fs, [path], cx).await;
171
172 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
173 language_registry.add(js_lang());
174 language_registry.add(json_lang());
175 language_registry.add(rust_lang());
176
177 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
178
179 cx.executor().run_until_parked();
180
181 cx.update(|cx| {
182 let tree = worktree.read(cx);
183 let settings_for = |path: &str| {
184 let file_entry = tree.entry_for_path(path).unwrap().clone();
185 let file = File::for_entry(file_entry, worktree.clone());
186 let file_language = project
187 .read(cx)
188 .languages()
189 .language_for_file_path(file.path.as_ref());
190 let file_language = cx
191 .background_executor()
192 .block(file_language)
193 .expect("Failed to get file language");
194 let file = file as _;
195 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
196 };
197
198 let settings_a = settings_for("a.rs");
199 let settings_b = settings_for("b/b.rs");
200 let settings_c = settings_for("c.js");
201 let settings_readme = settings_for("README.json");
202
203 // .editorconfig overrides .zed/settings
204 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
205 assert_eq!(settings_a.hard_tabs, true);
206 assert_eq!(settings_a.ensure_final_newline_on_save, true);
207 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
208
209 // .editorconfig in b/ overrides .editorconfig in root
210 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
211
212 // "indent_size" is not set, so "tab_width" is used
213 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
214
215 // README.md should not be affected by .editorconfig's globe "*.rs"
216 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
217 });
218}
219
220#[gpui::test]
221async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
222 init_test(cx);
223 cx.update(|cx| {
224 GitHostingProviderRegistry::default_global(cx);
225 git_hosting_providers::init(cx);
226 });
227
228 let fs = FakeFs::new(cx.executor());
229 let str_path = path!("/dir");
230 let path = Path::new(str_path);
231
232 fs.insert_tree(
233 path!("/dir"),
234 json!({
235 ".zed": {
236 "settings.json": r#"{
237 "git_hosting_providers": [
238 {
239 "provider": "gitlab",
240 "base_url": "https://google.com",
241 "name": "foo"
242 }
243 ]
244 }"#
245 },
246 }),
247 )
248 .await;
249
250 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
251 let (_worktree, _) =
252 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
253 cx.executor().run_until_parked();
254
255 cx.update(|cx| {
256 let provider = GitHostingProviderRegistry::global(cx);
257 assert!(
258 provider
259 .list_hosting_providers()
260 .into_iter()
261 .any(|provider| provider.name() == "foo")
262 );
263 });
264
265 fs.atomic_write(
266 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
267 "{}".into(),
268 )
269 .await
270 .unwrap();
271
272 cx.run_until_parked();
273
274 cx.update(|cx| {
275 let provider = GitHostingProviderRegistry::global(cx);
276 assert!(
277 !provider
278 .list_hosting_providers()
279 .into_iter()
280 .any(|provider| provider.name() == "foo")
281 );
282 });
283}
284
// Verifies that per-directory `.zed` settings and tasks are resolved with the
// correct precedence: a nested `b/.zed` overrides the worktree-root `.zed`,
// recently-scheduled tasks sort first, and global (file-based) tasks sort last.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Fixture: root `.zed` declares tab_size 8 and a "cargo check all" task;
    // `b/.zed` overrides tab_size to 2 and declares its own "cargo check" task.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task resolution context scoped to the single worktree.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Identifies the tasks declared in the worktree-root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            // Settings precedence: a/ sees the root settings, b/ sees its override.
            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Before any task has been run, both worktree task sources are listed; the
    // id_base differs on Windows because of path-separator escaping.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root `.zed` task as recently scheduled and install a global
    // file-based task, then observe how the ordering changes.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Expected order: the just-scheduled root task first, then the nested
    // worktree task, and the global tasks.json entry (with its env) last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
493
494#[gpui::test]
495async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
496 init_test(cx);
497 TaskStore::init(None);
498
499 let fs = FakeFs::new(cx.executor());
500 fs.insert_tree(
501 path!("/dir"),
502 json!({
503 ".zed": {
504 "tasks.json": r#"[{
505 "label": "test worktree root",
506 "command": "echo $ZED_WORKTREE_ROOT"
507 }]"#,
508 },
509 "a": {
510 "a.rs": "fn a() {\n A\n}"
511 },
512 }),
513 )
514 .await;
515
516 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
517 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
518
519 cx.executor().run_until_parked();
520 let worktree_id = cx.update(|cx| {
521 project.update(cx, |project, cx| {
522 project.worktrees(cx).next().unwrap().read(cx).id()
523 })
524 });
525
526 let active_non_worktree_item_tasks = cx
527 .update(|cx| {
528 get_all_tasks(
529 &project,
530 Arc::new(TaskContexts {
531 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
532 active_worktree_context: None,
533 other_worktree_contexts: Vec::new(),
534 lsp_task_sources: HashMap::default(),
535 latest_selection: None,
536 }),
537 cx,
538 )
539 })
540 .await;
541 assert!(
542 active_non_worktree_item_tasks.is_empty(),
543 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
544 );
545
546 let active_worktree_tasks = cx
547 .update(|cx| {
548 get_all_tasks(
549 &project,
550 Arc::new(TaskContexts {
551 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
552 active_worktree_context: Some((worktree_id, {
553 let mut worktree_context = TaskContext::default();
554 worktree_context
555 .task_variables
556 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
557 worktree_context
558 })),
559 other_worktree_contexts: Vec::new(),
560 lsp_task_sources: HashMap::default(),
561 latest_selection: None,
562 }),
563 cx,
564 )
565 })
566 .await;
567 assert_eq!(
568 active_worktree_tasks
569 .into_iter()
570 .map(|(source_kind, task)| {
571 let resolved = task.resolved;
572 (source_kind, resolved.command.unwrap())
573 })
574 .collect::<Vec<_>>(),
575 vec![(
576 TaskSourceKind::Worktree {
577 id: worktree_id,
578 directory_in_worktree: PathBuf::from(path!(".zed")),
579 id_base: if cfg!(windows) {
580 "local worktree tasks from directory \".zed\"".into()
581 } else {
582 "local worktree tasks from directory \".zed\"".into()
583 },
584 },
585 "echo /dir".to_string(),
586 )]
587 );
588}
589
// End-to-end exercise of language-server lifecycle management: servers start
// lazily when a matching buffer opens, buffers are routed only to servers for
// their language, renames move documents between servers, and restarts reopen
// all relevant documents. The assertions below depend on the exact order of
// LSP notifications, so statement order matters throughout.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server: completion triggers "." and "::", supports save notifications.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server: completion trigger ":", supports save notifications.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // The rename is observed by the server as a close of the old path...
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    // ...followed by an open of the new path with a reset version.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed the buffer with a diagnostic so we can verify it is cleared when the
    // buffer later changes language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two DidOpen notifications is not guaranteed, hence the set
    // comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
992
993#[gpui::test]
994async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
995 init_test(cx);
996
997 let fs = FakeFs::new(cx.executor());
998 fs.insert_tree(
999 path!("/the-root"),
1000 json!({
1001 ".gitignore": "target\n",
1002 "Cargo.lock": "",
1003 "src": {
1004 "a.rs": "",
1005 "b.rs": "",
1006 },
1007 "target": {
1008 "x": {
1009 "out": {
1010 "x.rs": ""
1011 }
1012 },
1013 "y": {
1014 "out": {
1015 "y.rs": "",
1016 }
1017 },
1018 "z": {
1019 "out": {
1020 "z.rs": ""
1021 }
1022 }
1023 }
1024 }),
1025 )
1026 .await;
1027 fs.insert_tree(
1028 path!("/the-registry"),
1029 json!({
1030 "dep1": {
1031 "src": {
1032 "dep1.rs": "",
1033 }
1034 },
1035 "dep2": {
1036 "src": {
1037 "dep2.rs": "",
1038 }
1039 },
1040 }),
1041 )
1042 .await;
1043 fs.insert_tree(
1044 path!("/the/stdlib"),
1045 json!({
1046 "LICENSE": "",
1047 "src": {
1048 "string.rs": "",
1049 }
1050 }),
1051 )
1052 .await;
1053
1054 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1055 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1056 (project.languages().clone(), project.lsp_store())
1057 });
1058 language_registry.add(rust_lang());
1059 let mut fake_servers = language_registry.register_fake_lsp(
1060 "Rust",
1061 FakeLspAdapter {
1062 name: "the-language-server",
1063 ..Default::default()
1064 },
1065 );
1066
1067 cx.executor().run_until_parked();
1068
1069 // Start the language server by opening a buffer with a compatible file extension.
1070 project
1071 .update(cx, |project, cx| {
1072 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1073 })
1074 .await
1075 .unwrap();
1076
1077 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1078 project.update(cx, |project, cx| {
1079 let worktree = project.worktrees(cx).next().unwrap();
1080 assert_eq!(
1081 worktree
1082 .read(cx)
1083 .snapshot()
1084 .entries(true, 0)
1085 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1086 .collect::<Vec<_>>(),
1087 &[
1088 (Path::new(""), false),
1089 (Path::new(".gitignore"), false),
1090 (Path::new("Cargo.lock"), false),
1091 (Path::new("src"), false),
1092 (Path::new("src/a.rs"), false),
1093 (Path::new("src/b.rs"), false),
1094 (Path::new("target"), true),
1095 ]
1096 );
1097 });
1098
1099 let prev_read_dir_count = fs.read_dir_call_count();
1100
1101 let fake_server = fake_servers.next().await.unwrap();
1102 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1103 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1104 id
1105 });
1106
1107 // Simulate jumping to a definition in a dependency outside of the worktree.
1108 let _out_of_worktree_buffer = project
1109 .update(cx, |project, cx| {
1110 project.open_local_buffer_via_lsp(
1111 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1112 server_id,
1113 cx,
1114 )
1115 })
1116 .await
1117 .unwrap();
1118
1119 // Keep track of the FS events reported to the language server.
1120 let file_changes = Arc::new(Mutex::new(Vec::new()));
1121 fake_server
1122 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1123 registrations: vec![lsp::Registration {
1124 id: Default::default(),
1125 method: "workspace/didChangeWatchedFiles".to_string(),
1126 register_options: serde_json::to_value(
1127 lsp::DidChangeWatchedFilesRegistrationOptions {
1128 watchers: vec![
1129 lsp::FileSystemWatcher {
1130 glob_pattern: lsp::GlobPattern::String(
1131 path!("/the-root/Cargo.toml").to_string(),
1132 ),
1133 kind: None,
1134 },
1135 lsp::FileSystemWatcher {
1136 glob_pattern: lsp::GlobPattern::String(
1137 path!("/the-root/src/*.{rs,c}").to_string(),
1138 ),
1139 kind: None,
1140 },
1141 lsp::FileSystemWatcher {
1142 glob_pattern: lsp::GlobPattern::String(
1143 path!("/the-root/target/y/**/*.rs").to_string(),
1144 ),
1145 kind: None,
1146 },
1147 lsp::FileSystemWatcher {
1148 glob_pattern: lsp::GlobPattern::String(
1149 path!("/the/stdlib/src/**/*.rs").to_string(),
1150 ),
1151 kind: None,
1152 },
1153 lsp::FileSystemWatcher {
1154 glob_pattern: lsp::GlobPattern::String(
1155 path!("**/Cargo.lock").to_string(),
1156 ),
1157 kind: None,
1158 },
1159 ],
1160 },
1161 )
1162 .ok(),
1163 }],
1164 })
1165 .await
1166 .into_response()
1167 .unwrap();
1168 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1169 let file_changes = file_changes.clone();
1170 move |params, _| {
1171 let mut file_changes = file_changes.lock();
1172 file_changes.extend(params.changes);
1173 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1174 }
1175 });
1176
1177 cx.executor().run_until_parked();
1178 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1179 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1180
1181 let mut new_watched_paths = fs.watched_paths();
1182 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1183 assert_eq!(
1184 &new_watched_paths,
1185 &[
1186 Path::new(path!("/the-root")),
1187 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1188 Path::new(path!("/the/stdlib/src"))
1189 ]
1190 );
1191
1192 // Now the language server has asked us to watch an ignored directory path,
1193 // so we recursively load it.
1194 project.update(cx, |project, cx| {
1195 let worktree = project.visible_worktrees(cx).next().unwrap();
1196 assert_eq!(
1197 worktree
1198 .read(cx)
1199 .snapshot()
1200 .entries(true, 0)
1201 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1202 .collect::<Vec<_>>(),
1203 &[
1204 (Path::new(""), false),
1205 (Path::new(".gitignore"), false),
1206 (Path::new("Cargo.lock"), false),
1207 (Path::new("src"), false),
1208 (Path::new("src/a.rs"), false),
1209 (Path::new("src/b.rs"), false),
1210 (Path::new("target"), true),
1211 (Path::new("target/x"), true),
1212 (Path::new("target/y"), true),
1213 (Path::new("target/y/out"), true),
1214 (Path::new("target/y/out/y.rs"), true),
1215 (Path::new("target/z"), true),
1216 ]
1217 );
1218 });
1219
1220 // Perform some file system mutations, two of which match the watched patterns,
1221 // and one of which does not.
1222 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1223 .await
1224 .unwrap();
1225 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1226 .await
1227 .unwrap();
1228 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1229 .await
1230 .unwrap();
1231 fs.create_file(
1232 path!("/the-root/target/x/out/x2.rs").as_ref(),
1233 Default::default(),
1234 )
1235 .await
1236 .unwrap();
1237 fs.create_file(
1238 path!("/the-root/target/y/out/y2.rs").as_ref(),
1239 Default::default(),
1240 )
1241 .await
1242 .unwrap();
1243 fs.save(
1244 path!("/the-root/Cargo.lock").as_ref(),
1245 &"".into(),
1246 Default::default(),
1247 )
1248 .await
1249 .unwrap();
1250 fs.save(
1251 path!("/the-stdlib/LICENSE").as_ref(),
1252 &"".into(),
1253 Default::default(),
1254 )
1255 .await
1256 .unwrap();
1257 fs.save(
1258 path!("/the/stdlib/src/string.rs").as_ref(),
1259 &"".into(),
1260 Default::default(),
1261 )
1262 .await
1263 .unwrap();
1264
1265 // The language server receives events for the FS mutations that match its watch patterns.
1266 cx.executor().run_until_parked();
1267 assert_eq!(
1268 &*file_changes.lock(),
1269 &[
1270 lsp::FileEvent {
1271 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1272 typ: lsp::FileChangeType::CHANGED,
1273 },
1274 lsp::FileEvent {
1275 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1276 typ: lsp::FileChangeType::DELETED,
1277 },
1278 lsp::FileEvent {
1279 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1280 typ: lsp::FileChangeType::CREATED,
1281 },
1282 lsp::FileEvent {
1283 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1284 typ: lsp::FileChangeType::CREATED,
1285 },
1286 lsp::FileEvent {
1287 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1288 typ: lsp::FileChangeType::CHANGED,
1289 },
1290 ]
1291 );
1292}
1293
/// Verifies that diagnostics pushed for two separate single-file worktrees
/// are routed to the correct buffer, each carrying only its own diagnostic
/// with the severity reported by the server.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open the project with each file as its own single-file worktree.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Push one diagnostic per file, both attributed to the same server id:
    // an ERROR for `a.rs` and a WARNING for `b.rs`.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer renders only its own diagnostic, highlighted over the
    // variable name (columns 4..5).
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1399
1400#[gpui::test]
1401async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1402 init_test(cx);
1403
1404 let fs = FakeFs::new(cx.executor());
1405 fs.insert_tree(
1406 path!("/root"),
1407 json!({
1408 "dir": {
1409 ".git": {
1410 "HEAD": "ref: refs/heads/main",
1411 },
1412 ".gitignore": "b.rs",
1413 "a.rs": "let a = 1;",
1414 "b.rs": "let b = 2;",
1415 },
1416 "other.rs": "let b = c;"
1417 }),
1418 )
1419 .await;
1420
1421 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1422 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1423 let (worktree, _) = project
1424 .update(cx, |project, cx| {
1425 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1426 })
1427 .await
1428 .unwrap();
1429 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1430
1431 let (worktree, _) = project
1432 .update(cx, |project, cx| {
1433 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1434 })
1435 .await
1436 .unwrap();
1437 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1438
1439 let server_id = LanguageServerId(0);
1440 lsp_store.update(cx, |lsp_store, cx| {
1441 lsp_store
1442 .update_diagnostics(
1443 server_id,
1444 lsp::PublishDiagnosticsParams {
1445 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1446 version: None,
1447 diagnostics: vec![lsp::Diagnostic {
1448 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1449 severity: Some(lsp::DiagnosticSeverity::ERROR),
1450 message: "unused variable 'b'".to_string(),
1451 ..Default::default()
1452 }],
1453 },
1454 None,
1455 DiagnosticSourceKind::Pushed,
1456 &[],
1457 cx,
1458 )
1459 .unwrap();
1460 lsp_store
1461 .update_diagnostics(
1462 server_id,
1463 lsp::PublishDiagnosticsParams {
1464 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1465 version: None,
1466 diagnostics: vec![lsp::Diagnostic {
1467 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1468 severity: Some(lsp::DiagnosticSeverity::ERROR),
1469 message: "unknown variable 'c'".to_string(),
1470 ..Default::default()
1471 }],
1472 },
1473 None,
1474 DiagnosticSourceKind::Pushed,
1475 &[],
1476 cx,
1477 )
1478 .unwrap();
1479 });
1480
1481 let main_ignored_buffer = project
1482 .update(cx, |project, cx| {
1483 project.open_buffer((main_worktree_id, "b.rs"), cx)
1484 })
1485 .await
1486 .unwrap();
1487 main_ignored_buffer.update(cx, |buffer, _| {
1488 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1489 assert_eq!(
1490 chunks
1491 .iter()
1492 .map(|(s, d)| (s.as_str(), *d))
1493 .collect::<Vec<_>>(),
1494 &[
1495 ("let ", None),
1496 ("b", Some(DiagnosticSeverity::ERROR)),
1497 (" = 2;", None),
1498 ],
1499 "Gigitnored buffers should still get in-buffer diagnostics",
1500 );
1501 });
1502 let other_buffer = project
1503 .update(cx, |project, cx| {
1504 project.open_buffer((other_worktree_id, ""), cx)
1505 })
1506 .await
1507 .unwrap();
1508 other_buffer.update(cx, |buffer, _| {
1509 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1510 assert_eq!(
1511 chunks
1512 .iter()
1513 .map(|(s, d)| (s.as_str(), *d))
1514 .collect::<Vec<_>>(),
1515 &[
1516 ("let b = ", None),
1517 ("c", Some(DiagnosticSeverity::ERROR)),
1518 (";", None),
1519 ],
1520 "Buffers from hidden projects should still get in-buffer diagnostics"
1521 );
1522 });
1523
1524 project.update(cx, |project, cx| {
1525 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1526 assert_eq!(
1527 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1528 vec![(
1529 ProjectPath {
1530 worktree_id: main_worktree_id,
1531 path: Arc::from(Path::new("b.rs")),
1532 },
1533 server_id,
1534 DiagnosticSummary {
1535 error_count: 1,
1536 warning_count: 0,
1537 }
1538 )]
1539 );
1540 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1541 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1542 });
1543}
1544
/// Verifies the project event stream around disk-based diagnostics: the
/// server's progress token starts/ends `DiskBasedDiagnostics{Started,Finished}`
/// events, published diagnostics emit `DiagnosticsUpdated`, and publishing the
/// same empty diagnostics twice yields only one update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The fake adapter marks `progress_token` as the disk-based-diagnostics token.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress with the disk-based token emits a "started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, Path::new("a.rs")).into()],
        }
    );

    // Ending the progress token emits the matching "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the buffer opened afterwards.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, Path::new("a.rs")).into()],
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No second event: the diagnostics did not change.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1681
/// Verifies that restarting a language server while its disk-based
/// diagnostics are still in progress does not leave the project stuck in the
/// "diagnosing" state: the new server (id 1) takes over, and its progress
/// lifecycle alone determines when diagnostics are considered finished.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The old server (id 0) is removed and the replacement gets id 1.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1781
/// Verifies that diagnostics previously published by a language server are
/// cleared — both in the buffer and in the project summary — when that
/// server is restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is present in the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message)
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message)
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1862
/// Verifies that a server publishing diagnostics with an unknown (too-new)
/// buffer version does not corrupt version tracking: after a restart, the
/// buffer is re-opened on the new server with version 0.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The replacement server receives a fresh didOpen with version 0.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1902
/// Verifies that cancelling language-server work for a buffer sends a
/// `window/workDoneProgress/cancel` notification only for progress tokens
/// the server marked as cancellable.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // Start two work-in-progress tokens: one non-cancellable, one cancellable.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1967
/// Verifies that toggling the per-language `enable_language_server` setting
/// stops and restarts exactly the affected server: disabling Rust stops only
/// the Rust server; re-enabling Rust while disabling JavaScript restarts the
/// former and stops the latter.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening a buffer of each language starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.0.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.0.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.0.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The restarted Rust server re-opens the buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JavaScript server exits.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2085
2086#[gpui::test(iterations = 3)]
2087async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
2088 init_test(cx);
2089
2090 let text = "
2091 fn a() { A }
2092 fn b() { BB }
2093 fn c() { CCC }
2094 "
2095 .unindent();
2096
2097 let fs = FakeFs::new(cx.executor());
2098 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2099
2100 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2101 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2102
2103 language_registry.add(rust_lang());
2104 let mut fake_servers = language_registry.register_fake_lsp(
2105 "Rust",
2106 FakeLspAdapter {
2107 disk_based_diagnostics_sources: vec!["disk".into()],
2108 ..Default::default()
2109 },
2110 );
2111
2112 let buffer = project
2113 .update(cx, |project, cx| {
2114 project.open_local_buffer(path!("/dir/a.rs"), cx)
2115 })
2116 .await
2117 .unwrap();
2118
2119 let _handle = project.update(cx, |project, cx| {
2120 project.register_buffer_with_language_servers(&buffer, cx)
2121 });
2122
2123 let mut fake_server = fake_servers.next().await.unwrap();
2124 let open_notification = fake_server
2125 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2126 .await;
2127
2128 // Edit the buffer, moving the content down
2129 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2130 let change_notification_1 = fake_server
2131 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2132 .await;
2133 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2134
2135 // Report some diagnostics for the initial version of the buffer
2136 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2137 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2138 version: Some(open_notification.text_document.version),
2139 diagnostics: vec![
2140 lsp::Diagnostic {
2141 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2142 severity: Some(DiagnosticSeverity::ERROR),
2143 message: "undefined variable 'A'".to_string(),
2144 source: Some("disk".to_string()),
2145 ..Default::default()
2146 },
2147 lsp::Diagnostic {
2148 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2149 severity: Some(DiagnosticSeverity::ERROR),
2150 message: "undefined variable 'BB'".to_string(),
2151 source: Some("disk".to_string()),
2152 ..Default::default()
2153 },
2154 lsp::Diagnostic {
2155 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2156 severity: Some(DiagnosticSeverity::ERROR),
2157 source: Some("disk".to_string()),
2158 message: "undefined variable 'CCC'".to_string(),
2159 ..Default::default()
2160 },
2161 ],
2162 });
2163
2164 // The diagnostics have moved down since they were created.
2165 cx.executor().run_until_parked();
2166 buffer.update(cx, |buffer, _| {
2167 assert_eq!(
2168 buffer
2169 .snapshot()
2170 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2171 .collect::<Vec<_>>(),
2172 &[
2173 DiagnosticEntry {
2174 range: Point::new(3, 9)..Point::new(3, 11),
2175 diagnostic: Diagnostic {
2176 source: Some("disk".into()),
2177 severity: DiagnosticSeverity::ERROR,
2178 message: "undefined variable 'BB'".to_string(),
2179 is_disk_based: true,
2180 group_id: 1,
2181 is_primary: true,
2182 source_kind: DiagnosticSourceKind::Pushed,
2183 ..Diagnostic::default()
2184 },
2185 },
2186 DiagnosticEntry {
2187 range: Point::new(4, 9)..Point::new(4, 12),
2188 diagnostic: Diagnostic {
2189 source: Some("disk".into()),
2190 severity: DiagnosticSeverity::ERROR,
2191 message: "undefined variable 'CCC'".to_string(),
2192 is_disk_based: true,
2193 group_id: 2,
2194 is_primary: true,
2195 source_kind: DiagnosticSourceKind::Pushed,
2196 ..Diagnostic::default()
2197 }
2198 }
2199 ]
2200 );
2201 assert_eq!(
2202 chunks_with_diagnostics(buffer, 0..buffer.len()),
2203 [
2204 ("\n\nfn a() { ".to_string(), None),
2205 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2206 (" }\nfn b() { ".to_string(), None),
2207 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2208 (" }\nfn c() { ".to_string(), None),
2209 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2210 (" }\n".to_string(), None),
2211 ]
2212 );
2213 assert_eq!(
2214 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2215 [
2216 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2217 (" }\nfn c() { ".to_string(), None),
2218 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2219 ]
2220 );
2221 });
2222
2223 // Ensure overlapping diagnostics are highlighted correctly.
2224 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2225 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2226 version: Some(open_notification.text_document.version),
2227 diagnostics: vec![
2228 lsp::Diagnostic {
2229 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2230 severity: Some(DiagnosticSeverity::ERROR),
2231 message: "undefined variable 'A'".to_string(),
2232 source: Some("disk".to_string()),
2233 ..Default::default()
2234 },
2235 lsp::Diagnostic {
2236 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2237 severity: Some(DiagnosticSeverity::WARNING),
2238 message: "unreachable statement".to_string(),
2239 source: Some("disk".to_string()),
2240 ..Default::default()
2241 },
2242 ],
2243 });
2244
2245 cx.executor().run_until_parked();
2246 buffer.update(cx, |buffer, _| {
2247 assert_eq!(
2248 buffer
2249 .snapshot()
2250 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2251 .collect::<Vec<_>>(),
2252 &[
2253 DiagnosticEntry {
2254 range: Point::new(2, 9)..Point::new(2, 12),
2255 diagnostic: Diagnostic {
2256 source: Some("disk".into()),
2257 severity: DiagnosticSeverity::WARNING,
2258 message: "unreachable statement".to_string(),
2259 is_disk_based: true,
2260 group_id: 4,
2261 is_primary: true,
2262 source_kind: DiagnosticSourceKind::Pushed,
2263 ..Diagnostic::default()
2264 }
2265 },
2266 DiagnosticEntry {
2267 range: Point::new(2, 9)..Point::new(2, 10),
2268 diagnostic: Diagnostic {
2269 source: Some("disk".into()),
2270 severity: DiagnosticSeverity::ERROR,
2271 message: "undefined variable 'A'".to_string(),
2272 is_disk_based: true,
2273 group_id: 3,
2274 is_primary: true,
2275 source_kind: DiagnosticSourceKind::Pushed,
2276 ..Diagnostic::default()
2277 },
2278 }
2279 ]
2280 );
2281 assert_eq!(
2282 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2283 [
2284 ("fn a() { ".to_string(), None),
2285 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2286 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2287 ("\n".to_string(), None),
2288 ]
2289 );
2290 assert_eq!(
2291 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2292 [
2293 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2294 ("\n".to_string(), None),
2295 ]
2296 );
2297 });
2298
2299 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2300 // changes since the last save.
2301 buffer.update(cx, |buffer, cx| {
2302 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2303 buffer.edit(
2304 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2305 None,
2306 cx,
2307 );
2308 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2309 });
2310 let change_notification_2 = fake_server
2311 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2312 .await;
2313 assert!(
2314 change_notification_2.text_document.version > change_notification_1.text_document.version
2315 );
2316
2317 // Handle out-of-order diagnostics
2318 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2319 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2320 version: Some(change_notification_2.text_document.version),
2321 diagnostics: vec![
2322 lsp::Diagnostic {
2323 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2324 severity: Some(DiagnosticSeverity::ERROR),
2325 message: "undefined variable 'BB'".to_string(),
2326 source: Some("disk".to_string()),
2327 ..Default::default()
2328 },
2329 lsp::Diagnostic {
2330 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2331 severity: Some(DiagnosticSeverity::WARNING),
2332 message: "undefined variable 'A'".to_string(),
2333 source: Some("disk".to_string()),
2334 ..Default::default()
2335 },
2336 ],
2337 });
2338
2339 cx.executor().run_until_parked();
2340 buffer.update(cx, |buffer, _| {
2341 assert_eq!(
2342 buffer
2343 .snapshot()
2344 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2345 .collect::<Vec<_>>(),
2346 &[
2347 DiagnosticEntry {
2348 range: Point::new(2, 21)..Point::new(2, 22),
2349 diagnostic: Diagnostic {
2350 source: Some("disk".into()),
2351 severity: DiagnosticSeverity::WARNING,
2352 message: "undefined variable 'A'".to_string(),
2353 is_disk_based: true,
2354 group_id: 6,
2355 is_primary: true,
2356 source_kind: DiagnosticSourceKind::Pushed,
2357 ..Diagnostic::default()
2358 }
2359 },
2360 DiagnosticEntry {
2361 range: Point::new(3, 9)..Point::new(3, 14),
2362 diagnostic: Diagnostic {
2363 source: Some("disk".into()),
2364 severity: DiagnosticSeverity::ERROR,
2365 message: "undefined variable 'BB'".to_string(),
2366 is_disk_based: true,
2367 group_id: 5,
2368 is_primary: true,
2369 source_kind: DiagnosticSourceKind::Pushed,
2370 ..Diagnostic::default()
2371 },
2372 }
2373 ]
2374 );
2375 });
2376}
2377
2378#[gpui::test]
2379async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2380 init_test(cx);
2381
2382 let text = concat!(
2383 "let one = ;\n", //
2384 "let two = \n",
2385 "let three = 3;\n",
2386 );
2387
2388 let fs = FakeFs::new(cx.executor());
2389 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2390
2391 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2392 let buffer = project
2393 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2394 .await
2395 .unwrap();
2396
2397 project.update(cx, |project, cx| {
2398 project.lsp_store.update(cx, |lsp_store, cx| {
2399 lsp_store
2400 .update_diagnostic_entries(
2401 LanguageServerId(0),
2402 PathBuf::from("/dir/a.rs"),
2403 None,
2404 None,
2405 vec![
2406 DiagnosticEntry {
2407 range: Unclipped(PointUtf16::new(0, 10))
2408 ..Unclipped(PointUtf16::new(0, 10)),
2409 diagnostic: Diagnostic {
2410 severity: DiagnosticSeverity::ERROR,
2411 message: "syntax error 1".to_string(),
2412 source_kind: DiagnosticSourceKind::Pushed,
2413 ..Diagnostic::default()
2414 },
2415 },
2416 DiagnosticEntry {
2417 range: Unclipped(PointUtf16::new(1, 10))
2418 ..Unclipped(PointUtf16::new(1, 10)),
2419 diagnostic: Diagnostic {
2420 severity: DiagnosticSeverity::ERROR,
2421 message: "syntax error 2".to_string(),
2422 source_kind: DiagnosticSourceKind::Pushed,
2423 ..Diagnostic::default()
2424 },
2425 },
2426 ],
2427 cx,
2428 )
2429 .unwrap();
2430 })
2431 });
2432
2433 // An empty range is extended forward to include the following character.
2434 // At the end of a line, an empty range is extended backward to include
2435 // the preceding character.
2436 buffer.update(cx, |buffer, _| {
2437 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2438 assert_eq!(
2439 chunks
2440 .iter()
2441 .map(|(s, d)| (s.as_str(), *d))
2442 .collect::<Vec<_>>(),
2443 &[
2444 ("let one = ", None),
2445 (";", Some(DiagnosticSeverity::ERROR)),
2446 ("\nlet two =", None),
2447 (" ", Some(DiagnosticSeverity::ERROR)),
2448 ("\nlet three = 3;\n", None)
2449 ]
2450 );
2451 });
2452}
2453
2454#[gpui::test]
2455async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2456 init_test(cx);
2457
2458 let fs = FakeFs::new(cx.executor());
2459 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2460 .await;
2461
2462 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2463 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2464
2465 lsp_store.update(cx, |lsp_store, cx| {
2466 lsp_store
2467 .update_diagnostic_entries(
2468 LanguageServerId(0),
2469 Path::new("/dir/a.rs").to_owned(),
2470 None,
2471 None,
2472 vec![DiagnosticEntry {
2473 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2474 diagnostic: Diagnostic {
2475 severity: DiagnosticSeverity::ERROR,
2476 is_primary: true,
2477 message: "syntax error a1".to_string(),
2478 source_kind: DiagnosticSourceKind::Pushed,
2479 ..Diagnostic::default()
2480 },
2481 }],
2482 cx,
2483 )
2484 .unwrap();
2485 lsp_store
2486 .update_diagnostic_entries(
2487 LanguageServerId(1),
2488 Path::new("/dir/a.rs").to_owned(),
2489 None,
2490 None,
2491 vec![DiagnosticEntry {
2492 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2493 diagnostic: Diagnostic {
2494 severity: DiagnosticSeverity::ERROR,
2495 is_primary: true,
2496 message: "syntax error b1".to_string(),
2497 source_kind: DiagnosticSourceKind::Pushed,
2498 ..Diagnostic::default()
2499 },
2500 }],
2501 cx,
2502 )
2503 .unwrap();
2504
2505 assert_eq!(
2506 lsp_store.diagnostic_summary(false, cx),
2507 DiagnosticSummary {
2508 error_count: 2,
2509 warning_count: 0,
2510 }
2511 );
2512 });
2513}
2514
// Verifies that `edits_from_lsp` correctly rebases edits a language server
// computed against a *past* version of the document: the buffer is modified
// locally after the server snapshots it, and the server's edits (addressed at
// the stale version) must be translated through those local changes before
// being applied.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the edits
    // below are sent against this soon-to-be-stale version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    // The ranges below are in the coordinates of the *old* document version
    // (note `Some(lsp_document_version)` passed to `edits_from_lsp`).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the local (post-snapshot)
    // edits while incorporating the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}
2669
// A language server may express a tiny logical change as a sprawling
// delete-and-reinsert diff. `edits_from_lsp` should collapse such a diff back
// down to the minimal set of buffer edits, so undo history and diff display
// stay clean.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four-edit diff above is minimized to just two edits: the import
        // rewrite and a one-line deletion.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2780
// Some servers send an insertion *after* a replacement that starts at the
// same position, which violates the LSP spec's ordering rules. The edits
// should nonetheless be applied so the inserted text ends up before the
// replaced span.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replacement covering "Path".
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    // Zero-width insertion at the start of the same span.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The insertion lands before the (unchanged) replacement target.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
2836
// `edits_from_lsp` must tolerate malformed server responses: edits listed out
// of order, with inverted (start > end) ranges, or with positions past the
// end of the document. The result should match the well-formed equivalent.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position is far past the end of the 8-line file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // After normalization the result is the same minimal pair of edits as
        // if the server had sent a well-formed response.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2943
2944fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2945 buffer: &Buffer,
2946 range: Range<T>,
2947) -> Vec<(String, Option<DiagnosticSeverity>)> {
2948 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2949 for chunk in buffer.snapshot().chunks(range, true) {
2950 if chunks
2951 .last()
2952 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
2953 {
2954 chunks.last_mut().unwrap().0.push_str(chunk.text);
2955 } else {
2956 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2957 }
2958 }
2959 chunks
2960}
2961
// Go-to-definition targeting a file outside the project's worktrees: the
// target file is loaded into a new *invisible* worktree, which is released
// once the last handle to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs exists on disk but is not opened.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server answers GotoDefinition with a location inside a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an invisible (`false`) worktree to hold the
        // definition target; b.rs remains the sole visible one.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Returns each worktree's absolute root path and whether it is visible.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3060
// When a completion item carries an explicit `text_edit`, that edit's text
// and range win over both `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request before wiring up the server's handler.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    // The edit covers the last 3 characters ("fqn").
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // Both the replacement text and the replace range come from the text_edit.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3143
// When completion items lack their own `text_edit`, the list-level
// `itemDefaults.editRange` supplies the replace range, and the replacement
// text falls back to `insert_text`, then to `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    // The default edit range covers the last 3 chars ("fqn").
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text is used, with the range taken from item_defaults.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no insert_text either, the label itself is the replacement.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3279
// When neither the item nor the list defaults provide an edit range, the
// replace range must be inferred locally (the asserted offsets correspond to
// the token adjacent to the cursor: "fqn" and "cmp" below).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // insert_text wins over the label; the replace range spans "fqn".
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor is inside the string literal, just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is used as the replacement; the range spans "cmp".
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3385
// Carriage returns in server-provided completion text ("\r" and "\r\n") are
// normalized to plain "\n" before insertion into the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mixed line endings: a bare "\r" and a Windows "\r\n".
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    // Both kinds of carriage return were converted to "\n".
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3453
// Verifies the command-based code-action flow: a code action that resolves to
// a *command* (rather than a workspace edit) is executed via
// `workspace/executeCommand`, and the edits the server then pushes through an
// `workspace/applyEdit` request end up in the returned project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The fake server supports resolving code actions and executing the one
    // command used by this test.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`, which will resolve to a command).
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request back to the client: insert "X"
                    // at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3595
3596#[gpui::test(iterations = 10)]
3597async fn test_save_file(cx: &mut gpui::TestAppContext) {
3598 init_test(cx);
3599
3600 let fs = FakeFs::new(cx.executor());
3601 fs.insert_tree(
3602 path!("/dir"),
3603 json!({
3604 "file1": "the old contents",
3605 }),
3606 )
3607 .await;
3608
3609 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3610 let buffer = project
3611 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3612 .await
3613 .unwrap();
3614 buffer.update(cx, |buffer, cx| {
3615 assert_eq!(buffer.text(), "the old contents");
3616 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3617 });
3618
3619 project
3620 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3621 .await
3622 .unwrap();
3623
3624 let new_text = fs
3625 .load(Path::new(path!("/dir/file1")))
3626 .await
3627 .unwrap()
3628 .replace("\r\n", "\n");
3629 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3630}
3631
// Saving an unnamed buffer under a path with a recognized extension must
// assign the buffer a language and start the corresponding language server.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create a buffer with no file: no language can be detected yet, so no
    // language server should be associated with it.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as a ".rs" file inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: Arc::from("file.rs".as_ref()),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the newly-started server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
3711
3712#[gpui::test(iterations = 30)]
3713async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3714 init_test(cx);
3715
3716 let fs = FakeFs::new(cx.executor());
3717 fs.insert_tree(
3718 path!("/dir"),
3719 json!({
3720 "file1": "the original contents",
3721 }),
3722 )
3723 .await;
3724
3725 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3726 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3727 let buffer = project
3728 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3729 .await
3730 .unwrap();
3731
3732 // Simulate buffer diffs being slow, so that they don't complete before
3733 // the next file change occurs.
3734 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3735
3736 // Change the buffer's file on disk, and then wait for the file change
3737 // to be detected by the worktree, so that the buffer starts reloading.
3738 fs.save(
3739 path!("/dir/file1").as_ref(),
3740 &"the first contents".into(),
3741 Default::default(),
3742 )
3743 .await
3744 .unwrap();
3745 worktree.next_event(cx).await;
3746
3747 // Change the buffer's file again. Depending on the random seed, the
3748 // previous file change may still be in progress.
3749 fs.save(
3750 path!("/dir/file1").as_ref(),
3751 &"the second contents".into(),
3752 Default::default(),
3753 )
3754 .await
3755 .unwrap();
3756 worktree.next_event(cx).await;
3757
3758 cx.executor().run_until_parked();
3759 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3760 buffer.read_with(cx, |buffer, _| {
3761 assert_eq!(buffer.text(), on_disk_text);
3762 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3763 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3764 });
3765}
3766
3767#[gpui::test(iterations = 30)]
3768async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3769 init_test(cx);
3770
3771 let fs = FakeFs::new(cx.executor());
3772 fs.insert_tree(
3773 path!("/dir"),
3774 json!({
3775 "file1": "the original contents",
3776 }),
3777 )
3778 .await;
3779
3780 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3781 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3782 let buffer = project
3783 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3784 .await
3785 .unwrap();
3786
3787 // Simulate buffer diffs being slow, so that they don't complete before
3788 // the next file change occurs.
3789 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3790
3791 // Change the buffer's file on disk, and then wait for the file change
3792 // to be detected by the worktree, so that the buffer starts reloading.
3793 fs.save(
3794 path!("/dir/file1").as_ref(),
3795 &"the first contents".into(),
3796 Default::default(),
3797 )
3798 .await
3799 .unwrap();
3800 worktree.next_event(cx).await;
3801
3802 cx.executor()
3803 .spawn(cx.executor().simulate_random_delay())
3804 .await;
3805
3806 // Perform a noop edit, causing the buffer's version to increase.
3807 buffer.update(cx, |buffer, cx| {
3808 buffer.edit([(0..0, " ")], None, cx);
3809 buffer.undo(cx);
3810 });
3811
3812 cx.executor().run_until_parked();
3813 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3814 buffer.read_with(cx, |buffer, _| {
3815 let buffer_text = buffer.text();
3816 if buffer_text == on_disk_text {
3817 assert!(
3818 !buffer.is_dirty() && !buffer.has_conflict(),
3819 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3820 );
3821 }
3822 // If the file change occurred while the buffer was processing the first
3823 // change, the buffer will be in a conflicting state.
3824 else {
3825 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3826 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3827 }
3828 });
3829}
3830
3831#[gpui::test]
3832async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3833 init_test(cx);
3834
3835 let fs = FakeFs::new(cx.executor());
3836 fs.insert_tree(
3837 path!("/dir"),
3838 json!({
3839 "file1": "the old contents",
3840 }),
3841 )
3842 .await;
3843
3844 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3845 let buffer = project
3846 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3847 .await
3848 .unwrap();
3849 buffer.update(cx, |buffer, cx| {
3850 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3851 });
3852
3853 project
3854 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3855 .await
3856 .unwrap();
3857
3858 let new_text = fs
3859 .load(Path::new(path!("/dir/file1")))
3860 .await
3861 .unwrap()
3862 .replace("\r\n", "\n");
3863 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3864}
3865
3866#[gpui::test]
3867async fn test_save_as(cx: &mut gpui::TestAppContext) {
3868 init_test(cx);
3869
3870 let fs = FakeFs::new(cx.executor());
3871 fs.insert_tree("/dir", json!({})).await;
3872
3873 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3874
3875 let languages = project.update(cx, |project, _| project.languages().clone());
3876 languages.add(rust_lang());
3877
3878 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3879 buffer.update(cx, |buffer, cx| {
3880 buffer.edit([(0..0, "abc")], None, cx);
3881 assert!(buffer.is_dirty());
3882 assert!(!buffer.has_conflict());
3883 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3884 });
3885 project
3886 .update(cx, |project, cx| {
3887 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3888 let path = ProjectPath {
3889 worktree_id,
3890 path: Arc::from(Path::new("file1.rs")),
3891 };
3892 project.save_buffer_as(buffer.clone(), path, cx)
3893 })
3894 .await
3895 .unwrap();
3896 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3897
3898 cx.executor().run_until_parked();
3899 buffer.update(cx, |buffer, cx| {
3900 assert_eq!(
3901 buffer.file().unwrap().full_path(cx),
3902 Path::new("dir/file1.rs")
3903 );
3904 assert!(!buffer.is_dirty());
3905 assert!(!buffer.has_conflict());
3906 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3907 });
3908
3909 let opened_buffer = project
3910 .update(cx, |project, cx| {
3911 project.open_local_buffer("/dir/file1.rs", cx)
3912 })
3913 .await
3914 .unwrap();
3915 assert_eq!(opened_buffer, buffer);
3916}
3917
// End-to-end check on a real filesystem: renames and deletions are picked up
// by the worktree rescan, open buffers follow their files across renames
// (stable entry ids), and a remote replica of the worktree converges after
// applying the streamed update messages.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Opens a buffer for the given tree-relative path.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Resolves a tree-relative path to its stable worktree entry id.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits, to replay on the remote.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the new on-disk layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames (both direct and via a renamed ancestor).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files' new paths; the deleted file's buffer
    // keeps its old path but reports DiskState::Deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });
}
4083
4084#[gpui::test(iterations = 10)]
4085async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4086 init_test(cx);
4087
4088 let fs = FakeFs::new(cx.executor());
4089 fs.insert_tree(
4090 path!("/dir"),
4091 json!({
4092 "a": {
4093 "file1": "",
4094 }
4095 }),
4096 )
4097 .await;
4098
4099 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4100 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4101 let tree_id = tree.update(cx, |tree, _| tree.id());
4102
4103 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4104 project.update(cx, |project, cx| {
4105 let tree = project.worktrees(cx).next().unwrap();
4106 tree.read(cx)
4107 .entry_for_path(path)
4108 .unwrap_or_else(|| panic!("no entry for path {}", path))
4109 .id
4110 })
4111 };
4112
4113 let dir_id = id_for_path("a", cx);
4114 let file_id = id_for_path("a/file1", cx);
4115 let buffer = project
4116 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
4117 .await
4118 .unwrap();
4119 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4120
4121 project
4122 .update(cx, |project, cx| {
4123 project.rename_entry(dir_id, Path::new("b"), cx)
4124 })
4125 .unwrap()
4126 .await
4127 .into_included()
4128 .unwrap();
4129 cx.executor().run_until_parked();
4130
4131 assert_eq!(id_for_path("b", cx), dir_id);
4132 assert_eq!(id_for_path("b/file1", cx), file_id);
4133 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4134}
4135
4136#[gpui::test]
4137async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4138 init_test(cx);
4139
4140 let fs = FakeFs::new(cx.executor());
4141 fs.insert_tree(
4142 "/dir",
4143 json!({
4144 "a.txt": "a-contents",
4145 "b.txt": "b-contents",
4146 }),
4147 )
4148 .await;
4149
4150 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4151
4152 // Spawn multiple tasks to open paths, repeating some paths.
4153 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4154 (
4155 p.open_local_buffer("/dir/a.txt", cx),
4156 p.open_local_buffer("/dir/b.txt", cx),
4157 p.open_local_buffer("/dir/a.txt", cx),
4158 )
4159 });
4160
4161 let buffer_a_1 = buffer_a_1.await.unwrap();
4162 let buffer_a_2 = buffer_a_2.await.unwrap();
4163 let buffer_b = buffer_b.await.unwrap();
4164 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4165 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4166
4167 // There is only one buffer per path.
4168 let buffer_a_id = buffer_a_1.entity_id();
4169 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4170
4171 // Open the same path again while it is still open.
4172 drop(buffer_a_1);
4173 let buffer_a_3 = project
4174 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4175 .await
4176 .unwrap();
4177
4178 // There's still only one buffer per path.
4179 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4180}
4181
4182#[gpui::test]
4183async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4184 init_test(cx);
4185
4186 let fs = FakeFs::new(cx.executor());
4187 fs.insert_tree(
4188 path!("/dir"),
4189 json!({
4190 "file1": "abc",
4191 "file2": "def",
4192 "file3": "ghi",
4193 }),
4194 )
4195 .await;
4196
4197 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4198
4199 let buffer1 = project
4200 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4201 .await
4202 .unwrap();
4203 let events = Arc::new(Mutex::new(Vec::new()));
4204
4205 // initially, the buffer isn't dirty.
4206 buffer1.update(cx, |buffer, cx| {
4207 cx.subscribe(&buffer1, {
4208 let events = events.clone();
4209 move |_, _, event, _| match event {
4210 BufferEvent::Operation { .. } => {}
4211 _ => events.lock().push(event.clone()),
4212 }
4213 })
4214 .detach();
4215
4216 assert!(!buffer.is_dirty());
4217 assert!(events.lock().is_empty());
4218
4219 buffer.edit([(1..2, "")], None, cx);
4220 });
4221
4222 // after the first edit, the buffer is dirty, and emits a dirtied event.
4223 buffer1.update(cx, |buffer, cx| {
4224 assert!(buffer.text() == "ac");
4225 assert!(buffer.is_dirty());
4226 assert_eq!(
4227 *events.lock(),
4228 &[
4229 language::BufferEvent::Edited,
4230 language::BufferEvent::DirtyChanged
4231 ]
4232 );
4233 events.lock().clear();
4234 buffer.did_save(
4235 buffer.version(),
4236 buffer.file().unwrap().disk_state().mtime(),
4237 cx,
4238 );
4239 });
4240
4241 // after saving, the buffer is not dirty, and emits a saved event.
4242 buffer1.update(cx, |buffer, cx| {
4243 assert!(!buffer.is_dirty());
4244 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4245 events.lock().clear();
4246
4247 buffer.edit([(1..1, "B")], None, cx);
4248 buffer.edit([(2..2, "D")], None, cx);
4249 });
4250
4251 // after editing again, the buffer is dirty, and emits another dirty event.
4252 buffer1.update(cx, |buffer, cx| {
4253 assert!(buffer.text() == "aBDc");
4254 assert!(buffer.is_dirty());
4255 assert_eq!(
4256 *events.lock(),
4257 &[
4258 language::BufferEvent::Edited,
4259 language::BufferEvent::DirtyChanged,
4260 language::BufferEvent::Edited,
4261 ],
4262 );
4263 events.lock().clear();
4264
4265 // After restoring the buffer to its previously-saved state,
4266 // the buffer is not considered dirty anymore.
4267 buffer.edit([(1..3, "")], None, cx);
4268 assert!(buffer.text() == "ac");
4269 assert!(!buffer.is_dirty());
4270 });
4271
4272 assert_eq!(
4273 *events.lock(),
4274 &[
4275 language::BufferEvent::Edited,
4276 language::BufferEvent::DirtyChanged
4277 ]
4278 );
4279
4280 // When a file is deleted, it is not considered dirty.
4281 let events = Arc::new(Mutex::new(Vec::new()));
4282 let buffer2 = project
4283 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4284 .await
4285 .unwrap();
4286 buffer2.update(cx, |_, cx| {
4287 cx.subscribe(&buffer2, {
4288 let events = events.clone();
4289 move |_, _, event, _| match event {
4290 BufferEvent::Operation { .. } => {}
4291 _ => events.lock().push(event.clone()),
4292 }
4293 })
4294 .detach();
4295 });
4296
4297 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4298 .await
4299 .unwrap();
4300 cx.executor().run_until_parked();
4301 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4302 assert_eq!(
4303 mem::take(&mut *events.lock()),
4304 &[language::BufferEvent::FileHandleChanged]
4305 );
4306
4307 // Buffer becomes dirty when edited.
4308 buffer2.update(cx, |buffer, cx| {
4309 buffer.edit([(2..3, "")], None, cx);
4310 assert_eq!(buffer.is_dirty(), true);
4311 });
4312 assert_eq!(
4313 mem::take(&mut *events.lock()),
4314 &[
4315 language::BufferEvent::Edited,
4316 language::BufferEvent::DirtyChanged
4317 ]
4318 );
4319
4320 // Buffer becomes clean again when all of its content is removed, because
4321 // the file was deleted.
4322 buffer2.update(cx, |buffer, cx| {
4323 buffer.edit([(0..2, "")], None, cx);
4324 assert_eq!(buffer.is_empty(), true);
4325 assert_eq!(buffer.is_dirty(), false);
4326 });
4327 assert_eq!(
4328 *events.lock(),
4329 &[
4330 language::BufferEvent::Edited,
4331 language::BufferEvent::DirtyChanged
4332 ]
4333 );
4334
4335 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4336 let events = Arc::new(Mutex::new(Vec::new()));
4337 let buffer3 = project
4338 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4339 .await
4340 .unwrap();
4341 buffer3.update(cx, |_, cx| {
4342 cx.subscribe(&buffer3, {
4343 let events = events.clone();
4344 move |_, _, event, _| match event {
4345 BufferEvent::Operation { .. } => {}
4346 _ => events.lock().push(event.clone()),
4347 }
4348 })
4349 .detach();
4350 });
4351
4352 buffer3.update(cx, |buffer, cx| {
4353 buffer.edit([(0..0, "x")], None, cx);
4354 });
4355 events.lock().clear();
4356 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4357 .await
4358 .unwrap();
4359 cx.executor().run_until_parked();
4360 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4361 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4362}
4363
// A clean buffer whose file changes on disk is reloaded via a diff (so
// anchors move sensibly); a dirty buffer is *not* reloaded and instead
// enters a conflicted state.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The "ˇ" markers define the offsets at which anchors will be placed.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // The markers in the new text give the offsets the anchors should land
    // on after the diff-based reload.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4446
4447#[gpui::test]
4448async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4449 init_test(cx);
4450
4451 let fs = FakeFs::new(cx.executor());
4452 fs.insert_tree(
4453 path!("/dir"),
4454 json!({
4455 "file1": "a\nb\nc\n",
4456 "file2": "one\r\ntwo\r\nthree\r\n",
4457 }),
4458 )
4459 .await;
4460
4461 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4462 let buffer1 = project
4463 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4464 .await
4465 .unwrap();
4466 let buffer2 = project
4467 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4468 .await
4469 .unwrap();
4470
4471 buffer1.update(cx, |buffer, _| {
4472 assert_eq!(buffer.text(), "a\nb\nc\n");
4473 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4474 });
4475 buffer2.update(cx, |buffer, _| {
4476 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4477 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4478 });
4479
4480 // Change a file's line endings on disk from unix to windows. The buffer's
4481 // state updates correctly.
4482 fs.save(
4483 path!("/dir/file1").as_ref(),
4484 &"aaa\nb\nc\n".into(),
4485 LineEnding::Windows,
4486 )
4487 .await
4488 .unwrap();
4489 cx.executor().run_until_parked();
4490 buffer1.update(cx, |buffer, _| {
4491 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4492 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4493 });
4494
4495 // Save a file with windows line endings. The file is written correctly.
4496 buffer2.update(cx, |buffer, cx| {
4497 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4498 });
4499 project
4500 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4501 .await
4502 .unwrap();
4503 assert_eq!(
4504 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4505 "one\r\ntwo\r\nthree\r\nfour\r\n",
4506 );
4507}
4508
// Verifies that pushed diagnostics carrying `relatedInformation` are grouped:
// each primary diagnostic (the warning/error) and the HINT entries derived
// from its related information share a `group_id`, and `diagnostic_group`
// returns a group's entries ordered by position.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two logical problems are published:
    //   - "error 1" (WARNING) with one related hint at the same range;
    //   - "error 2" (ERROR) with two related hints at a different range.
    // Each hint also appears as a standalone HINT diagnostic whose related
    // information points back at its primary ("original diagnostic"), which is
    // what allows the store to group them.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary: "error 1" (WARNING).
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Standalone hint belonging to "error 1"; points back at it.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary: "error 2" (ERROR) with two related hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // Standalone hint 1 belonging to "error 2"; points back at it.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Standalone hint 2 belonging to "error 2"; points back at it.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Push the diagnostics into the LSP store as if server 0 published them.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by position. Hints carry the group id of their
    // primary: "error 2" and its hints form group 0, "error 1" and its hint
    // form group 1; exactly one entry per group is marked `is_primary`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: "error 2" plus its two hints, ordered by position.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
4768
// Verifies the LSP file-operation protocol during a worktree entry rename: a
// server that registered for rename file operations receives a
// `workspace/willRenameFiles` request — whose returned `WorkspaceEdit` the
// project must resolve — followed by a `workspace/didRenameFiles`
// notification, both carrying the old and new URIs.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server registers interest in renames of `*.rs` files and of
    // any folder, for both willRename (request) and didRename (notification).
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename (one.rs -> three.rs) before installing the request
    // handler; the handler is registered below, before the rename completes.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server returns from willRenameFiles; the project is
    // expected to resolve it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit handed back to the project, so we can assert on it
    // after the rename resolves.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // willRenameFiles must report exactly the renamed entry.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must also receive the
    // didRenameFiles notification with the same URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    // The willRenameFiles handler ran and handed its edit to the project.
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4897
4898#[gpui::test]
4899async fn test_rename(cx: &mut gpui::TestAppContext) {
4900 // hi
4901 init_test(cx);
4902
4903 let fs = FakeFs::new(cx.executor());
4904 fs.insert_tree(
4905 path!("/dir"),
4906 json!({
4907 "one.rs": "const ONE: usize = 1;",
4908 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4909 }),
4910 )
4911 .await;
4912
4913 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4914
4915 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4916 language_registry.add(rust_lang());
4917 let mut fake_servers = language_registry.register_fake_lsp(
4918 "Rust",
4919 FakeLspAdapter {
4920 capabilities: lsp::ServerCapabilities {
4921 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
4922 prepare_provider: Some(true),
4923 work_done_progress_options: Default::default(),
4924 })),
4925 ..Default::default()
4926 },
4927 ..Default::default()
4928 },
4929 );
4930
4931 let (buffer, _handle) = project
4932 .update(cx, |project, cx| {
4933 project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
4934 })
4935 .await
4936 .unwrap();
4937
4938 let fake_server = fake_servers.next().await.unwrap();
4939
4940 let response = project.update(cx, |project, cx| {
4941 project.prepare_rename(buffer.clone(), 7, cx)
4942 });
4943 fake_server
4944 .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
4945 assert_eq!(
4946 params.text_document.uri.as_str(),
4947 uri!("file:///dir/one.rs")
4948 );
4949 assert_eq!(params.position, lsp::Position::new(0, 7));
4950 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4951 lsp::Position::new(0, 6),
4952 lsp::Position::new(0, 9),
4953 ))))
4954 })
4955 .next()
4956 .await
4957 .unwrap();
4958 let response = response.await.unwrap();
4959 let PrepareRenameResponse::Success(range) = response else {
4960 panic!("{:?}", response);
4961 };
4962 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
4963 assert_eq!(range, 6..9);
4964
4965 let response = project.update(cx, |project, cx| {
4966 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
4967 });
4968 fake_server
4969 .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
4970 assert_eq!(
4971 params.text_document_position.text_document.uri.as_str(),
4972 uri!("file:///dir/one.rs")
4973 );
4974 assert_eq!(
4975 params.text_document_position.position,
4976 lsp::Position::new(0, 7)
4977 );
4978 assert_eq!(params.new_name, "THREE");
4979 Ok(Some(lsp::WorkspaceEdit {
4980 changes: Some(
4981 [
4982 (
4983 lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
4984 vec![lsp::TextEdit::new(
4985 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
4986 "THREE".to_string(),
4987 )],
4988 ),
4989 (
4990 lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
4991 vec![
4992 lsp::TextEdit::new(
4993 lsp::Range::new(
4994 lsp::Position::new(0, 24),
4995 lsp::Position::new(0, 27),
4996 ),
4997 "THREE".to_string(),
4998 ),
4999 lsp::TextEdit::new(
5000 lsp::Range::new(
5001 lsp::Position::new(0, 35),
5002 lsp::Position::new(0, 38),
5003 ),
5004 "THREE".to_string(),
5005 ),
5006 ],
5007 ),
5008 ]
5009 .into_iter()
5010 .collect(),
5011 ),
5012 ..Default::default()
5013 }))
5014 })
5015 .next()
5016 .await
5017 .unwrap();
5018 let mut transaction = response.await.unwrap().0;
5019 assert_eq!(transaction.len(), 2);
5020 assert_eq!(
5021 transaction
5022 .remove_entry(&buffer)
5023 .unwrap()
5024 .0
5025 .update(cx, |buffer, _| buffer.text()),
5026 "const THREE: usize = 1;"
5027 );
5028 assert_eq!(
5029 transaction
5030 .into_keys()
5031 .next()
5032 .unwrap()
5033 .update(cx, |buffer, _| buffer.text()),
5034 "const TWO: usize = one::THREE + one::THREE;"
5035 );
5036}
5037
5038#[gpui::test]
5039async fn test_search(cx: &mut gpui::TestAppContext) {
5040 init_test(cx);
5041
5042 let fs = FakeFs::new(cx.executor());
5043 fs.insert_tree(
5044 path!("/dir"),
5045 json!({
5046 "one.rs": "const ONE: usize = 1;",
5047 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5048 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5049 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5050 }),
5051 )
5052 .await;
5053 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5054 assert_eq!(
5055 search(
5056 &project,
5057 SearchQuery::text(
5058 "TWO",
5059 false,
5060 true,
5061 false,
5062 Default::default(),
5063 Default::default(),
5064 false,
5065 None
5066 )
5067 .unwrap(),
5068 cx
5069 )
5070 .await
5071 .unwrap(),
5072 HashMap::from_iter([
5073 (path!("dir/two.rs").to_string(), vec![6..9]),
5074 (path!("dir/three.rs").to_string(), vec![37..40])
5075 ])
5076 );
5077
5078 let buffer_4 = project
5079 .update(cx, |project, cx| {
5080 project.open_local_buffer(path!("/dir/four.rs"), cx)
5081 })
5082 .await
5083 .unwrap();
5084 buffer_4.update(cx, |buffer, cx| {
5085 let text = "two::TWO";
5086 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5087 });
5088
5089 assert_eq!(
5090 search(
5091 &project,
5092 SearchQuery::text(
5093 "TWO",
5094 false,
5095 true,
5096 false,
5097 Default::default(),
5098 Default::default(),
5099 false,
5100 None,
5101 )
5102 .unwrap(),
5103 cx
5104 )
5105 .await
5106 .unwrap(),
5107 HashMap::from_iter([
5108 (path!("dir/two.rs").to_string(), vec![6..9]),
5109 (path!("dir/three.rs").to_string(), vec![37..40]),
5110 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5111 ])
5112 );
5113}
5114
// Verifies that the inclusion `PathMatcher` restricts search results: no
// files are returned when nothing matches, per-extension globs select only
// those files, and extra non-matching inclusion globs are harmless.
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Every file contains the word "file", so the inclusion filter alone
    // determines which files are returned.
    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // NOTE(review): the positional arguments to `SearchQuery::text` appear to
    // be (query, whole_word, case_sensitive, include_ignored,
    // files_to_include, files_to_exclude, match_full_paths, buffers) —
    // confirm against the `SearchQuery::text` signature.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5234
// Verifies that the exclusion `PathMatcher` filters search results: a
// non-matching matcher excludes nothing, per-extension globs remove those
// files, and excluding every extension yields no results.
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Every file contains the word "file", so the exclusion filter alone
    // determines which files are removed from the results.
    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // NOTE(review): the positional arguments to `SearchQuery::text` appear to
    // be (query, whole_word, case_sensitive, include_ignored,
    // files_to_include, files_to_exclude, match_full_paths, buffers) —
    // confirm against the `SearchQuery::text` signature.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5354
// Same exclusion scenarios as `test_search_with_exclusions`, but with an
// extra untitled buffer whose text matches the query and which is marked
// non-searchable — it must never appear in any result set.
#[gpui::test]
async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // An open buffer containing exactly the query text, explicitly marked as
    // non-searchable; the assertions below confirm it is skipped.
    let _buffer = project.update(cx, |project, cx| {
        let buffer = project.create_local_buffer("file", None, cx);
        project.mark_buffer_as_non_searchable(buffer.read(cx).remote_id(), cx);
        buffer
    });

    // NOTE(review): the positional arguments to `SearchQuery::text` appear to
    // be (query, whole_word, case_sensitive, include_ignored,
    // files_to_include, files_to_exclude, match_full_paths, buffers) —
    // confirm against the `SearchQuery::text` signature.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5480
// Verifies the interaction of inclusion and exclusion matchers: when the two
// overlap, exclusions win; when they are disjoint, inclusions select files
// and exclusions remove none of them.
#[gpui::test]
async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Every file contains the word "file"; only the path filters vary below.
    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // NOTE(review): the positional arguments to `SearchQuery::text` appear to
    // be (query, whole_word, case_sensitive, include_ignored,
    // files_to_include, files_to_exclude, match_full_paths, buffers) —
    // confirm against the `SearchQuery::text` signature.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both no exclusions and inclusions match, exclusions should win and return nothing"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Non-matching inclusions and exclusions should not change that."
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
    );
}
5591
// Verifies inclusion filtering across multiple worktrees: a worktree-prefixed
// glob restricts results to that worktree, while an extension-only glob
// matches files in every worktree.
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two identical worktrees; each has the needle in a .rs and a .ts file.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // NOTE(review): the 7th positional argument is `true` here (and `false`
    // in the single-worktree tests) when the inclusion glob carries a
    // worktree-name prefix — presumably it makes the matcher apply to full
    // project paths; confirm against the `SearchQuery::text` signature.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An extension-only glob is not anchored to a worktree and matches both.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
5689
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Lay out a repo where `target/` and `node_modules/` are gitignored and
    // every file contains the substring "key" somewhere in its contents.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // SearchQuery::text positional args, in order: query, whole-word,
    // case-sensitive, include-ignored, files to include, files to exclude,
    // then two more (NOTE(review): presumably match-full-paths and an
    // optional buffer set — confirm against SearchQuery::text's signature).
    // The fourth arg is the one under test here.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false, // don't search gitignored files
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project for each query so earlier scan state can't affect results.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true, // search gitignored files too
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        // Expected ranges are byte offsets of "key" within each file.
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include/exclude filters still apply to ignored files: include only the
    // prettier directory, then exclude its TypeScript file.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5812
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Case-sensitive search matches only the lowercase occurrences. Expected
    // ranges are byte offsets: each Cyrillic letter is 2 bytes in UTF-8, so
    // "привет" spans 12 bytes (e.g. 3..15 after the 3-byte "// " prefix).
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive search matches both "привет" and "ПРИВЕТ". Note that
    // a case-insensitive non-ASCII query is represented as a Regex variant
    // rather than a Text variant — asserted explicitly below.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Even though such a query is backed by a regex, the trailing "." must be
    // matched literally: only two.rs ("ПРИВЕТ.") matches — one.rs would also
    // match ("привет!") if "." behaved as a wildcard.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
5896
5897#[gpui::test]
5898async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5899 init_test(cx);
5900
5901 let fs = FakeFs::new(cx.executor());
5902 fs.insert_tree(
5903 "/one/two",
5904 json!({
5905 "three": {
5906 "a.txt": "",
5907 "four": {}
5908 },
5909 "c.rs": ""
5910 }),
5911 )
5912 .await;
5913
5914 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5915 project
5916 .update(cx, |project, cx| {
5917 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5918 project.create_entry((id, "b.."), true, cx)
5919 })
5920 .await
5921 .unwrap()
5922 .into_included()
5923 .unwrap();
5924
5925 // Can't create paths outside the project
5926 let result = project
5927 .update(cx, |project, cx| {
5928 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5929 project.create_entry((id, "../../boop"), true, cx)
5930 })
5931 .await;
5932 assert!(result.is_err());
5933
5934 // Can't create paths with '..'
5935 let result = project
5936 .update(cx, |project, cx| {
5937 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5938 project.create_entry((id, "four/../beep"), true, cx)
5939 })
5940 .await;
5941 assert!(result.is_err());
5942
5943 assert_eq!(
5944 fs.paths(true),
5945 vec![
5946 PathBuf::from(path!("/")),
5947 PathBuf::from(path!("/one")),
5948 PathBuf::from(path!("/one/two")),
5949 PathBuf::from(path!("/one/two/c.rs")),
5950 PathBuf::from(path!("/one/two/three")),
5951 PathBuf::from(path!("/one/two/three/a.txt")),
5952 PathBuf::from(path!("/one/two/three/b..")),
5953 PathBuf::from(path!("/one/two/three/four")),
5954 ]
5955 );
5956
5957 // And we cannot open buffers with '..'
5958 let result = project
5959 .update(cx, |project, cx| {
5960 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5961 project.open_buffer((id, "../c.rs"), cx)
5962 })
5963 .await;
5964 assert!(result.is_err())
5965}
5966
5967#[gpui::test]
5968async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
5969 init_test(cx);
5970
5971 let fs = FakeFs::new(cx.executor());
5972 fs.insert_tree(
5973 path!("/dir"),
5974 json!({
5975 "a.tsx": "a",
5976 }),
5977 )
5978 .await;
5979
5980 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5981
5982 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5983 language_registry.add(tsx_lang());
5984 let language_server_names = [
5985 "TypeScriptServer",
5986 "TailwindServer",
5987 "ESLintServer",
5988 "NoHoverCapabilitiesServer",
5989 ];
5990 let mut language_servers = [
5991 language_registry.register_fake_lsp(
5992 "tsx",
5993 FakeLspAdapter {
5994 name: language_server_names[0],
5995 capabilities: lsp::ServerCapabilities {
5996 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5997 ..lsp::ServerCapabilities::default()
5998 },
5999 ..FakeLspAdapter::default()
6000 },
6001 ),
6002 language_registry.register_fake_lsp(
6003 "tsx",
6004 FakeLspAdapter {
6005 name: language_server_names[1],
6006 capabilities: lsp::ServerCapabilities {
6007 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6008 ..lsp::ServerCapabilities::default()
6009 },
6010 ..FakeLspAdapter::default()
6011 },
6012 ),
6013 language_registry.register_fake_lsp(
6014 "tsx",
6015 FakeLspAdapter {
6016 name: language_server_names[2],
6017 capabilities: lsp::ServerCapabilities {
6018 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6019 ..lsp::ServerCapabilities::default()
6020 },
6021 ..FakeLspAdapter::default()
6022 },
6023 ),
6024 language_registry.register_fake_lsp(
6025 "tsx",
6026 FakeLspAdapter {
6027 name: language_server_names[3],
6028 capabilities: lsp::ServerCapabilities {
6029 hover_provider: None,
6030 ..lsp::ServerCapabilities::default()
6031 },
6032 ..FakeLspAdapter::default()
6033 },
6034 ),
6035 ];
6036
6037 let (buffer, _handle) = project
6038 .update(cx, |p, cx| {
6039 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6040 })
6041 .await
6042 .unwrap();
6043 cx.executor().run_until_parked();
6044
6045 let mut servers_with_hover_requests = HashMap::default();
6046 for i in 0..language_server_names.len() {
6047 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
6048 panic!(
6049 "Failed to get language server #{i} with name {}",
6050 &language_server_names[i]
6051 )
6052 });
6053 let new_server_name = new_server.server.name();
6054 assert!(
6055 !servers_with_hover_requests.contains_key(&new_server_name),
6056 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6057 );
6058 match new_server_name.as_ref() {
6059 "TailwindServer" | "TypeScriptServer" => {
6060 servers_with_hover_requests.insert(
6061 new_server_name.clone(),
6062 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6063 move |_, _| {
6064 let name = new_server_name.clone();
6065 async move {
6066 Ok(Some(lsp::Hover {
6067 contents: lsp::HoverContents::Scalar(
6068 lsp::MarkedString::String(format!("{name} hover")),
6069 ),
6070 range: None,
6071 }))
6072 }
6073 },
6074 ),
6075 );
6076 }
6077 "ESLintServer" => {
6078 servers_with_hover_requests.insert(
6079 new_server_name,
6080 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6081 |_, _| async move { Ok(None) },
6082 ),
6083 );
6084 }
6085 "NoHoverCapabilitiesServer" => {
6086 let _never_handled = new_server
6087 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
6088 panic!(
6089 "Should not call for hovers server with no corresponding capabilities"
6090 )
6091 });
6092 }
6093 unexpected => panic!("Unexpected server name: {unexpected}"),
6094 }
6095 }
6096
6097 let hover_task = project.update(cx, |project, cx| {
6098 project.hover(&buffer, Point::new(0, 0), cx)
6099 });
6100 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
6101 |mut hover_request| async move {
6102 hover_request
6103 .next()
6104 .await
6105 .expect("All hover requests should have been triggered")
6106 },
6107 ))
6108 .await;
6109 assert_eq!(
6110 vec!["TailwindServer hover", "TypeScriptServer hover"],
6111 hover_task
6112 .await
6113 .into_iter()
6114 .flatten()
6115 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6116 .sorted()
6117 .collect::<Vec<_>>(),
6118 "Should receive hover responses from all related servers with hover capabilities"
6119 );
6120}
6121
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server responds with hover parts that are empty or contain only
    // whitespace and newlines.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Make sure the fake server actually received the hover request before
    // inspecting the result.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    // Every whitespace-only part is filtered out, so no hover text remains.
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
6195
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions with different kinds: organize-imports
    // and fix-all.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request actions restricted to the organize-imports kind only.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // The fix-all action is filtered out; only the requested kind remains.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6274
6275#[gpui::test]
6276async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6277 init_test(cx);
6278
6279 let fs = FakeFs::new(cx.executor());
6280 fs.insert_tree(
6281 path!("/dir"),
6282 json!({
6283 "a.tsx": "a",
6284 }),
6285 )
6286 .await;
6287
6288 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6289
6290 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6291 language_registry.add(tsx_lang());
6292 let language_server_names = [
6293 "TypeScriptServer",
6294 "TailwindServer",
6295 "ESLintServer",
6296 "NoActionsCapabilitiesServer",
6297 ];
6298
6299 let mut language_server_rxs = [
6300 language_registry.register_fake_lsp(
6301 "tsx",
6302 FakeLspAdapter {
6303 name: language_server_names[0],
6304 capabilities: lsp::ServerCapabilities {
6305 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6306 ..lsp::ServerCapabilities::default()
6307 },
6308 ..FakeLspAdapter::default()
6309 },
6310 ),
6311 language_registry.register_fake_lsp(
6312 "tsx",
6313 FakeLspAdapter {
6314 name: language_server_names[1],
6315 capabilities: lsp::ServerCapabilities {
6316 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6317 ..lsp::ServerCapabilities::default()
6318 },
6319 ..FakeLspAdapter::default()
6320 },
6321 ),
6322 language_registry.register_fake_lsp(
6323 "tsx",
6324 FakeLspAdapter {
6325 name: language_server_names[2],
6326 capabilities: lsp::ServerCapabilities {
6327 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6328 ..lsp::ServerCapabilities::default()
6329 },
6330 ..FakeLspAdapter::default()
6331 },
6332 ),
6333 language_registry.register_fake_lsp(
6334 "tsx",
6335 FakeLspAdapter {
6336 name: language_server_names[3],
6337 capabilities: lsp::ServerCapabilities {
6338 code_action_provider: None,
6339 ..lsp::ServerCapabilities::default()
6340 },
6341 ..FakeLspAdapter::default()
6342 },
6343 ),
6344 ];
6345
6346 let (buffer, _handle) = project
6347 .update(cx, |p, cx| {
6348 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6349 })
6350 .await
6351 .unwrap();
6352 cx.executor().run_until_parked();
6353
6354 let mut servers_with_actions_requests = HashMap::default();
6355 for i in 0..language_server_names.len() {
6356 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6357 panic!(
6358 "Failed to get language server #{i} with name {}",
6359 &language_server_names[i]
6360 )
6361 });
6362 let new_server_name = new_server.server.name();
6363
6364 assert!(
6365 !servers_with_actions_requests.contains_key(&new_server_name),
6366 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6367 );
6368 match new_server_name.0.as_ref() {
6369 "TailwindServer" | "TypeScriptServer" => {
6370 servers_with_actions_requests.insert(
6371 new_server_name.clone(),
6372 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6373 move |_, _| {
6374 let name = new_server_name.clone();
6375 async move {
6376 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6377 lsp::CodeAction {
6378 title: format!("{name} code action"),
6379 ..lsp::CodeAction::default()
6380 },
6381 )]))
6382 }
6383 },
6384 ),
6385 );
6386 }
6387 "ESLintServer" => {
6388 servers_with_actions_requests.insert(
6389 new_server_name,
6390 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6391 |_, _| async move { Ok(None) },
6392 ),
6393 );
6394 }
6395 "NoActionsCapabilitiesServer" => {
6396 let _never_handled = new_server
6397 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6398 panic!(
6399 "Should not call for code actions server with no corresponding capabilities"
6400 )
6401 });
6402 }
6403 unexpected => panic!("Unexpected server name: {unexpected}"),
6404 }
6405 }
6406
6407 let code_actions_task = project.update(cx, |project, cx| {
6408 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6409 });
6410
6411 // cx.run_until_parked();
6412 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6413 |mut code_actions_request| async move {
6414 code_actions_request
6415 .next()
6416 .await
6417 .expect("All code actions requests should have been triggered")
6418 },
6419 ))
6420 .await;
6421 assert_eq!(
6422 vec!["TailwindServer code action", "TypeScriptServer code action"],
6423 code_actions_task
6424 .await
6425 .unwrap()
6426 .unwrap()
6427 .into_iter()
6428 .map(|code_action| code_action.lsp_action.title().to_owned())
6429 .sorted()
6430 .collect::<Vec<_>>(),
6431 "Should receive code actions responses from all related servers with hover capabilities"
6432 );
6433}
6434
6435#[gpui::test]
6436async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6437 init_test(cx);
6438
6439 let fs = FakeFs::new(cx.executor());
6440 fs.insert_tree(
6441 "/dir",
6442 json!({
6443 "a.rs": "let a = 1;",
6444 "b.rs": "let b = 2;",
6445 "c.rs": "let c = 2;",
6446 }),
6447 )
6448 .await;
6449
6450 let project = Project::test(
6451 fs,
6452 [
6453 "/dir/a.rs".as_ref(),
6454 "/dir/b.rs".as_ref(),
6455 "/dir/c.rs".as_ref(),
6456 ],
6457 cx,
6458 )
6459 .await;
6460
6461 // check the initial state and get the worktrees
6462 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6463 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6464 assert_eq!(worktrees.len(), 3);
6465
6466 let worktree_a = worktrees[0].read(cx);
6467 let worktree_b = worktrees[1].read(cx);
6468 let worktree_c = worktrees[2].read(cx);
6469
6470 // check they start in the right order
6471 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6472 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6473 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6474
6475 (
6476 worktrees[0].clone(),
6477 worktrees[1].clone(),
6478 worktrees[2].clone(),
6479 )
6480 });
6481
6482 // move first worktree to after the second
6483 // [a, b, c] -> [b, a, c]
6484 project
6485 .update(cx, |project, cx| {
6486 let first = worktree_a.read(cx);
6487 let second = worktree_b.read(cx);
6488 project.move_worktree(first.id(), second.id(), cx)
6489 })
6490 .expect("moving first after second");
6491
6492 // check the state after moving
6493 project.update(cx, |project, cx| {
6494 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6495 assert_eq!(worktrees.len(), 3);
6496
6497 let first = worktrees[0].read(cx);
6498 let second = worktrees[1].read(cx);
6499 let third = worktrees[2].read(cx);
6500
6501 // check they are now in the right order
6502 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6503 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6504 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6505 });
6506
6507 // move the second worktree to before the first
6508 // [b, a, c] -> [a, b, c]
6509 project
6510 .update(cx, |project, cx| {
6511 let second = worktree_a.read(cx);
6512 let first = worktree_b.read(cx);
6513 project.move_worktree(first.id(), second.id(), cx)
6514 })
6515 .expect("moving second before first");
6516
6517 // check the state after moving
6518 project.update(cx, |project, cx| {
6519 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6520 assert_eq!(worktrees.len(), 3);
6521
6522 let first = worktrees[0].read(cx);
6523 let second = worktrees[1].read(cx);
6524 let third = worktrees[2].read(cx);
6525
6526 // check they are now in the right order
6527 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6528 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6529 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6530 });
6531
6532 // move the second worktree to after the third
6533 // [a, b, c] -> [a, c, b]
6534 project
6535 .update(cx, |project, cx| {
6536 let second = worktree_b.read(cx);
6537 let third = worktree_c.read(cx);
6538 project.move_worktree(second.id(), third.id(), cx)
6539 })
6540 .expect("moving second after third");
6541
6542 // check the state after moving
6543 project.update(cx, |project, cx| {
6544 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6545 assert_eq!(worktrees.len(), 3);
6546
6547 let first = worktrees[0].read(cx);
6548 let second = worktrees[1].read(cx);
6549 let third = worktrees[2].read(cx);
6550
6551 // check they are now in the right order
6552 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6553 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6554 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6555 });
6556
6557 // move the third worktree to before the second
6558 // [a, c, b] -> [a, b, c]
6559 project
6560 .update(cx, |project, cx| {
6561 let third = worktree_c.read(cx);
6562 let second = worktree_b.read(cx);
6563 project.move_worktree(third.id(), second.id(), cx)
6564 })
6565 .expect("moving third before second");
6566
6567 // check the state after moving
6568 project.update(cx, |project, cx| {
6569 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6570 assert_eq!(worktrees.len(), 3);
6571
6572 let first = worktrees[0].read(cx);
6573 let second = worktrees[1].read(cx);
6574 let third = worktrees[2].read(cx);
6575
6576 // check they are now in the right order
6577 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6578 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6579 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6580 });
6581
6582 // move the first worktree to after the third
6583 // [a, b, c] -> [b, c, a]
6584 project
6585 .update(cx, |project, cx| {
6586 let first = worktree_a.read(cx);
6587 let third = worktree_c.read(cx);
6588 project.move_worktree(first.id(), third.id(), cx)
6589 })
6590 .expect("moving first after third");
6591
6592 // check the state after moving
6593 project.update(cx, |project, cx| {
6594 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6595 assert_eq!(worktrees.len(), 3);
6596
6597 let first = worktrees[0].read(cx);
6598 let second = worktrees[1].read(cx);
6599 let third = worktrees[2].read(cx);
6600
6601 // check they are now in the right order
6602 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6603 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6604 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6605 });
6606
6607 // move the third worktree to before the first
6608 // [b, c, a] -> [a, b, c]
6609 project
6610 .update(cx, |project, cx| {
6611 let third = worktree_a.read(cx);
6612 let first = worktree_b.read(cx);
6613 project.move_worktree(third.id(), first.id(), cx)
6614 })
6615 .expect("moving third before first");
6616
6617 // check the state after moving
6618 project.update(cx, |project, cx| {
6619 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6620 assert_eq!(worktrees.len(), 3);
6621
6622 let first = worktrees[0].read(cx);
6623 let second = worktrees[1].read(cx);
6624 let third = worktrees[2].read(cx);
6625
6626 // check they are now in the right order
6627 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6628 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6629 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6630 });
6631}
6632
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index version: no comment, prints "hello". Working copy: adds a comment
    // line and prints "goodbye".
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    // The unstaged diff compares the buffer against the index contents.
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Against the index: row 0 (the comment) was added and row 2's println
    // text was modified.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Update the index so it contains the comment but not the println line;
    // the only remaining unstaged change is the added println at row 2.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6730
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of modification.rs: HEAD prints "hello" without the
    // comment, the index prints "goodbye" without the comment, and the
    // working copy has both the comment and "goodbye".
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // deletion.rs exists in HEAD and the index but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    // The uncommitted diff compares the buffer against HEAD.
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text picks up the buffer's language (Rust).
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // Against HEAD: the added comment line is not in the index, so it still
    // has a secondary (unstaged) hunk; the println change matches the index,
    // so it has none.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a deletion; it is still present in the index,
    // so the deletion is not yet staged (secondary hunk present).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the file is gone from the index too, the deletion hunk no longer
    // has a secondary (unstaged) counterpart.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6910
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Setup: HEAD and the index both contain `committed_contents`; the
    // working copy deletes "zero" and rewrites "two" and "four", producing
    // three initially-unstaged hunks in the uncommitted diff.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so the test can assert on the exact event
    // sequence emitted by staging operations below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged
    // (`SecondaryHunkRemovalPending`) before the index write completes.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It still shows the optimistic pending state, even
    // though the write is destined to fail.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations. The middle hunk (`hunks[1]`)
    // was already staged above, so this stages the deletion (`hunks[0]`) and
    // the last modification (`hunks[2]`).
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7250
// NOTE(review): the seeds below are pinned — presumably chosen to reproduce a
// specific task interleaving; confirm before changing them.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Setup mirrors `test_staging_hunks`: HEAD and index agree; the working
    // copy deletes "zero" and rewrites "two"/"four", giving three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events so that index writes complete but their FS
    // notifications stay buffered until explicitly flushed below.
    fs.pause_events();

    // Stage the first hunk. It shows the optimistic pending state.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    // Both staged hunks must remain in the pending state.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7444
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via the
    // `OPERATIONS` environment variable.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.gen_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every 5th line is modified in the buffer, producing 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the test's model: each operation below updates the
    // corresponding entry's `secondary_status` to the expected pending state.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.gen_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times to shuffle task interleavings.
        for _ in 0..rng.gen_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending state should have resolved to
    // its corresponding final state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    // Compare the real diff against the model, hunk by hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7563
7564#[gpui::test]
7565async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7566 init_test(cx);
7567
7568 let committed_contents = r#"
7569 fn main() {
7570 println!("hello from HEAD");
7571 }
7572 "#
7573 .unindent();
7574 let file_contents = r#"
7575 fn main() {
7576 println!("hello from the working copy");
7577 }
7578 "#
7579 .unindent();
7580
7581 let fs = FakeFs::new(cx.background_executor.clone());
7582 fs.insert_tree(
7583 "/dir",
7584 json!({
7585 ".git": {},
7586 "src": {
7587 "main.rs": file_contents,
7588 }
7589 }),
7590 )
7591 .await;
7592
7593 fs.set_head_for_repo(
7594 Path::new("/dir/.git"),
7595 &[("src/main.rs".into(), committed_contents.clone())],
7596 "deadbeef",
7597 );
7598 fs.set_index_for_repo(
7599 Path::new("/dir/.git"),
7600 &[("src/main.rs".into(), committed_contents.clone())],
7601 );
7602
7603 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7604
7605 let buffer = project
7606 .update(cx, |project, cx| {
7607 project.open_local_buffer("/dir/src/main.rs", cx)
7608 })
7609 .await
7610 .unwrap();
7611 let uncommitted_diff = project
7612 .update(cx, |project, cx| {
7613 project.open_uncommitted_diff(buffer.clone(), cx)
7614 })
7615 .await
7616 .unwrap();
7617
7618 cx.run_until_parked();
7619 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7620 let snapshot = buffer.read(cx).snapshot();
7621 assert_hunks(
7622 uncommitted_diff.hunks(&snapshot, cx),
7623 &snapshot,
7624 &uncommitted_diff.base_text_string().unwrap(),
7625 &[(
7626 1..2,
7627 " println!(\"hello from HEAD\");\n",
7628 " println!(\"hello from the working copy\");\n",
7629 DiffHunkStatus {
7630 kind: DiffHunkStatusKind::Modified,
7631 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7632 },
7633 )],
7634 );
7635 });
7636}
7637
7638#[gpui::test]
7639async fn test_repository_and_path_for_project_path(
7640 background_executor: BackgroundExecutor,
7641 cx: &mut gpui::TestAppContext,
7642) {
7643 init_test(cx);
7644 let fs = FakeFs::new(background_executor);
7645 fs.insert_tree(
7646 path!("/root"),
7647 json!({
7648 "c.txt": "",
7649 "dir1": {
7650 ".git": {},
7651 "deps": {
7652 "dep1": {
7653 ".git": {},
7654 "src": {
7655 "a.txt": ""
7656 }
7657 }
7658 },
7659 "src": {
7660 "b.txt": ""
7661 }
7662 },
7663 }),
7664 )
7665 .await;
7666
7667 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7668 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7669 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7670 project
7671 .update(cx, |project, cx| project.git_scans_complete(cx))
7672 .await;
7673 cx.run_until_parked();
7674
7675 project.read_with(cx, |project, cx| {
7676 let git_store = project.git_store().read(cx);
7677 let pairs = [
7678 ("c.txt", None),
7679 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
7680 (
7681 "dir1/deps/dep1/src/a.txt",
7682 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
7683 ),
7684 ];
7685 let expected = pairs
7686 .iter()
7687 .map(|(path, result)| {
7688 (
7689 path,
7690 result.map(|(repo, repo_path)| {
7691 (Path::new(repo).into(), RepoPath::from(repo_path))
7692 }),
7693 )
7694 })
7695 .collect::<Vec<_>>();
7696 let actual = pairs
7697 .iter()
7698 .map(|(path, _)| {
7699 let project_path = (tree_id, Path::new(path)).into();
7700 let result = maybe!({
7701 let (repo, repo_path) =
7702 git_store.repository_and_path_for_project_path(&project_path, cx)?;
7703 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
7704 });
7705 (path, result)
7706 })
7707 .collect::<Vec<_>>();
7708 pretty_assertions::assert_eq!(expected, actual);
7709 });
7710
7711 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
7712 .await
7713 .unwrap();
7714 cx.run_until_parked();
7715
7716 project.read_with(cx, |project, cx| {
7717 let git_store = project.git_store().read(cx);
7718 assert_eq!(
7719 git_store.repository_and_path_for_project_path(
7720 &(tree_id, Path::new("dir1/src/b.txt")).into(),
7721 cx
7722 ),
7723 None
7724 );
7725 });
7726}
7727
7728#[gpui::test]
7729async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7730 init_test(cx);
7731 let fs = FakeFs::new(cx.background_executor.clone());
7732 fs.insert_tree(
7733 path!("/root"),
7734 json!({
7735 "home": {
7736 ".git": {},
7737 "project": {
7738 "a.txt": "A"
7739 },
7740 },
7741 }),
7742 )
7743 .await;
7744 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7745
7746 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7747 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7748 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7749
7750 project
7751 .update(cx, |project, cx| project.git_scans_complete(cx))
7752 .await;
7753 tree.flush_fs_events(cx).await;
7754
7755 project.read_with(cx, |project, cx| {
7756 let containing = project
7757 .git_store()
7758 .read(cx)
7759 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7760 assert!(containing.is_none());
7761 });
7762
7763 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7764 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7765 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7766 project
7767 .update(cx, |project, cx| project.git_scans_complete(cx))
7768 .await;
7769 tree.flush_fs_events(cx).await;
7770
7771 project.read_with(cx, |project, cx| {
7772 let containing = project
7773 .git_store()
7774 .read(cx)
7775 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7776 assert_eq!(
7777 containing
7778 .unwrap()
7779 .0
7780 .read(cx)
7781 .work_directory_abs_path
7782 .as_ref(),
7783 Path::new(path!("/root/home"))
7784 );
7785 });
7786}
7787
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // This test uses the real filesystem and real git, so blocking is allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // After the commit: delete d.txt and modify a.txt in the working copy.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup.
    // Note: the unchanged file c.txt is absent from the cached statuses.
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify the previously-unchanged c.txt; it should now appear as modified.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit all outstanding changes (a.txt, c.txt staged; d.txt removed
    // from the index) so the repository becomes clean again.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7917
7918#[gpui::test]
7919async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
7920 init_test(cx);
7921 cx.executor().allow_parking();
7922
7923 let root = TempTree::new(json!({
7924 "project": {
7925 "sub": {},
7926 "a.txt": "",
7927 },
7928 }));
7929
7930 let work_dir = root.path().join("project");
7931 let repo = git_init(work_dir.as_path());
7932 // a.txt exists in HEAD and the working copy but is deleted in the index.
7933 git_add("a.txt", &repo);
7934 git_commit("Initial commit", &repo);
7935 git_remove_index("a.txt".as_ref(), &repo);
7936 // `sub` is a nested git repository.
7937 let _sub = git_init(&work_dir.join("sub"));
7938
7939 let project = Project::test(
7940 Arc::new(RealFs::new(None, cx.executor())),
7941 [root.path()],
7942 cx,
7943 )
7944 .await;
7945
7946 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7947 tree.flush_fs_events(cx).await;
7948 project
7949 .update(cx, |project, cx| project.git_scans_complete(cx))
7950 .await;
7951 cx.executor().run_until_parked();
7952
7953 let repository = project.read_with(cx, |project, cx| {
7954 project
7955 .repositories(cx)
7956 .values()
7957 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
7958 .unwrap()
7959 .clone()
7960 });
7961
7962 repository.read_with(cx, |repository, _cx| {
7963 let entries = repository.cached_status().collect::<Vec<_>>();
7964
7965 // `sub` doesn't appear in our computed statuses.
7966 // a.txt appears with a combined `DA` status.
7967 assert_eq!(
7968 entries,
7969 [StatusEntry {
7970 repo_path: "a.txt".into(),
7971 status: TrackedStatus {
7972 index_status: StatusCode::Deleted,
7973 worktree_status: StatusCode::Added
7974 }
7975 .into(),
7976 }]
7977 )
7978 });
7979}
7980
7981#[gpui::test]
7982async fn test_repository_subfolder_git_status(
7983 executor: gpui::BackgroundExecutor,
7984 cx: &mut gpui::TestAppContext,
7985) {
7986 init_test(cx);
7987
7988 let fs = FakeFs::new(executor);
7989 fs.insert_tree(
7990 path!("/root"),
7991 json!({
7992 "my-repo": {
7993 ".git": {},
7994 "a.txt": "a",
7995 "sub-folder-1": {
7996 "sub-folder-2": {
7997 "c.txt": "cc",
7998 "d": {
7999 "e.txt": "eee"
8000 }
8001 },
8002 }
8003 },
8004 }),
8005 )
8006 .await;
8007
8008 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
8009 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
8010
8011 fs.set_status_for_repo(
8012 path!("/root/my-repo/.git").as_ref(),
8013 &[(E_TXT.as_ref(), FileStatus::Untracked)],
8014 );
8015
8016 let project = Project::test(
8017 fs.clone(),
8018 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
8019 cx,
8020 )
8021 .await;
8022
8023 project
8024 .update(cx, |project, cx| project.git_scans_complete(cx))
8025 .await;
8026 cx.run_until_parked();
8027
8028 let repository = project.read_with(cx, |project, cx| {
8029 project.repositories(cx).values().next().unwrap().clone()
8030 });
8031
8032 // Ensure that the git status is loaded correctly
8033 repository.read_with(cx, |repository, _cx| {
8034 assert_eq!(
8035 repository.work_directory_abs_path,
8036 Path::new(path!("/root/my-repo")).into()
8037 );
8038
8039 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
8040 assert_eq!(
8041 repository.status_for_path(&E_TXT.into()).unwrap().status,
8042 FileStatus::Untracked
8043 );
8044 });
8045
8046 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
8047 project
8048 .update(cx, |project, cx| project.git_scans_complete(cx))
8049 .await;
8050 cx.run_until_parked();
8051
8052 repository.read_with(cx, |repository, _cx| {
8053 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
8054 assert_eq!(repository.status_for_path(&E_TXT.into()), None);
8055 });
8056}
8057
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// `#[cfg(any())]` is always false, so this test is currently compiled out.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create conflicting edits of a.txt on two branches: "A" on
    // other-branch, "b" on main, then cherry-pick the former onto the latter.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git itself reports an in-progress, conflicted
    // cherry-pick before asserting on our own state.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository should report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick concludes, the conflict set should be empty.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8140
// Verifies that rewriting `.gitignore` re-evaluates both the ignored flag and
// the git status of affected files in a FakeFs-backed repository.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // The ignore states are now swapped, and b.txt shows as newly staged.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8208
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
//
// Verifies that renaming a repository's work directory on disk updates
// `work_directory_abs_path` while preserving per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Set up one committed-then-modified file ("a") and one untracked file ("b").
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should point at the new location with statuses intact.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8289
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of the Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
//
// End-to-end check of git status tracking against a real repository: initial
// scan, working-copy edits, commits, resets, stashes, gitignore changes, and
// directory renames.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove a tracked file and a whole directory, and ignore f.txt going forward.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A file created inside a new nested directory should show as untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming the outer directory should carry the untracked status along to
    // the file's new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8491
// Verifies that repositories are only discovered via visible worktrees:
// adding an invisible (non-visible) worktree must not register new repos.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Only the nested dep1 repo is in the visible worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add an invisible single-file worktree inside the outer dir1 repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list must be unchanged — dir1's repo stays undiscovered.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8553
// Verifies ignore handling during rescans: files covered by an ancestor
// directory's .gitignore, by the repo's own .gitignore, and newly created
// files in each category.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so ignored directories are still scanned.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the (ignored, hence lazily-scanned) directory to be loaded.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial states: the tracked file is clean; the ancestor-gitignore rule is
    // not applied inside the repo; the repo's own rule ignores ignored-dir.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new staged file, plus new files in each ignore category.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8689
// Verifies that linked git worktrees (`.git` file pointing into
// `.git/worktrees/...`) and submodules (`.git` file pointing into
// `.git/modules/...`) are discovered as separate repositories and that
// git-state changes in them are picked up.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be found: root, linked worktree, submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer should resolve to the linked worktree's repository.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
8839
// Verifies that two project worktrees living inside the same git repository
// are deduplicated into a single repository entry.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Both worktrees are subdirectories of the same repo.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Only one repository (the shared parent) should be registered.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
8886
8887async fn search(
8888 project: &Entity<Project>,
8889 query: SearchQuery,
8890 cx: &mut gpui::TestAppContext,
8891) -> Result<HashMap<String, Vec<Range<usize>>>> {
8892 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8893 let mut results = HashMap::default();
8894 while let Ok(search_result) = search_rx.recv().await {
8895 match search_result {
8896 SearchResult::Buffer { buffer, ranges } => {
8897 results.entry(buffer).or_insert(ranges);
8898 }
8899 SearchResult::LimitReached => {}
8900 }
8901 }
8902 Ok(results
8903 .into_iter()
8904 .map(|(buffer, ranges)| {
8905 buffer.update(cx, |buffer, cx| {
8906 let path = buffer
8907 .file()
8908 .unwrap()
8909 .full_path(cx)
8910 .to_string_lossy()
8911 .to_string();
8912 let ranges = ranges
8913 .into_iter()
8914 .map(|range| range.to_offset(buffer))
8915 .collect::<Vec<_>>();
8916 (path, ranges)
8917 })
8918 })
8919 .collect())
8920}
8921
/// Shared setup for every test in this file: installs a fresh test
/// `SettingsStore` global, then initializes the release channel, language
/// registry, and project settings that the tests depend on.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        // The settings store must exist before the init calls below read it.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
8933
8934fn json_lang() -> Arc<Language> {
8935 Arc::new(Language::new(
8936 LanguageConfig {
8937 name: "JSON".into(),
8938 matcher: LanguageMatcher {
8939 path_suffixes: vec!["json".to_string()],
8940 ..Default::default()
8941 },
8942 ..Default::default()
8943 },
8944 None,
8945 ))
8946}
8947
8948fn js_lang() -> Arc<Language> {
8949 Arc::new(Language::new(
8950 LanguageConfig {
8951 name: "JavaScript".into(),
8952 matcher: LanguageMatcher {
8953 path_suffixes: vec!["js".to_string()],
8954 ..Default::default()
8955 },
8956 ..Default::default()
8957 },
8958 None,
8959 ))
8960}
8961
8962fn rust_lang() -> Arc<Language> {
8963 Arc::new(Language::new(
8964 LanguageConfig {
8965 name: "Rust".into(),
8966 matcher: LanguageMatcher {
8967 path_suffixes: vec!["rs".to_string()],
8968 ..Default::default()
8969 },
8970 ..Default::default()
8971 },
8972 Some(tree_sitter_rust::LANGUAGE.into()),
8973 ))
8974}
8975
8976fn typescript_lang() -> Arc<Language> {
8977 Arc::new(Language::new(
8978 LanguageConfig {
8979 name: "TypeScript".into(),
8980 matcher: LanguageMatcher {
8981 path_suffixes: vec!["ts".to_string()],
8982 ..Default::default()
8983 },
8984 ..Default::default()
8985 },
8986 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8987 ))
8988}
8989
8990fn tsx_lang() -> Arc<Language> {
8991 Arc::new(Language::new(
8992 LanguageConfig {
8993 name: "tsx".into(),
8994 matcher: LanguageMatcher {
8995 path_suffixes: vec!["tsx".to_string()],
8996 ..Default::default()
8997 },
8998 ..Default::default()
8999 },
9000 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9001 ))
9002}
9003
9004fn get_all_tasks(
9005 project: &Entity<Project>,
9006 task_contexts: Arc<TaskContexts>,
9007 cx: &mut App,
9008) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9009 let new_tasks = project.update(cx, |project, cx| {
9010 project.task_store.update(cx, |task_store, cx| {
9011 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9012 this.used_and_current_resolved_tasks(task_contexts, cx)
9013 })
9014 })
9015 });
9016
9017 cx.background_spawn(async move {
9018 let (mut old, new) = new_tasks.await;
9019 old.extend(new);
9020 old
9021 })
9022}
9023
9024#[track_caller]
9025fn assert_entry_git_state(
9026 tree: &Worktree,
9027 repository: &Repository,
9028 path: &str,
9029 index_status: Option<StatusCode>,
9030 is_ignored: bool,
9031) {
9032 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9033 let entry = tree
9034 .entry_for_path(path)
9035 .unwrap_or_else(|| panic!("entry {path} not found"));
9036 let status = repository
9037 .status_for_path(&path.into())
9038 .map(|entry| entry.status);
9039 let expected = index_status.map(|index_status| {
9040 TrackedStatus {
9041 index_status,
9042 worktree_status: StatusCode::Unmodified,
9043 }
9044 .into()
9045 });
9046 assert_eq!(
9047 status, expected,
9048 "expected {path} to have git status: {expected:?}"
9049 );
9050 assert_eq!(
9051 entry.is_ignored, is_ignored,
9052 "expected {path} to have is_ignored: {is_ignored}"
9053 );
9054}
9055
9056#[track_caller]
9057fn git_init(path: &Path) -> git2::Repository {
9058 let mut init_opts = RepositoryInitOptions::new();
9059 init_opts.initial_head("main");
9060 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9061}
9062
9063#[track_caller]
9064fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9065 let path = path.as_ref();
9066 let mut index = repo.index().expect("Failed to get index");
9067 index.add_path(path).expect("Failed to add file");
9068 index.write().expect("Failed to write index");
9069}
9070
9071#[track_caller]
9072fn git_remove_index(path: &Path, repo: &git2::Repository) {
9073 let mut index = repo.index().expect("Failed to get index");
9074 index.remove_path(path).expect("Failed to add file");
9075 index.write().expect("Failed to write index");
9076}
9077
9078#[track_caller]
9079fn git_commit(msg: &'static str, repo: &git2::Repository) {
9080 use git2::Signature;
9081
9082 let signature = Signature::now("test", "test@zed.dev").unwrap();
9083 let oid = repo.index().unwrap().write_tree().unwrap();
9084 let tree = repo.find_tree(oid).unwrap();
9085 if let Ok(head) = repo.head() {
9086 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9087
9088 let parent_commit = parent_obj.as_commit().unwrap();
9089
9090 repo.commit(
9091 Some("HEAD"),
9092 &signature,
9093 &signature,
9094 msg,
9095 &tree,
9096 &[parent_commit],
9097 )
9098 .expect("Failed to commit with parent");
9099 } else {
9100 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9101 .expect("Failed to commit");
9102 }
9103}
9104
// Cherry-picks `commit` onto the current HEAD, panicking on failure. Conflicts
// are left in the repository for the caller to inspect. Compiled out
// (`#[cfg(any())]`) along with the flaky test that uses it.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9110
9111#[track_caller]
9112fn git_stash(repo: &mut git2::Repository) {
9113 use git2::Signature;
9114
9115 let signature = Signature::now("test", "test@zed.dev").unwrap();
9116 repo.stash_save(&signature, "N/A", None)
9117 .expect("Failed to stash");
9118}
9119
9120#[track_caller]
9121fn git_reset(offset: usize, repo: &git2::Repository) {
9122 let head = repo.head().expect("Couldn't get repo head");
9123 let object = head.peel(git2::ObjectType::Commit).unwrap();
9124 let commit = object.as_commit().unwrap();
9125 let new_head = commit
9126 .parents()
9127 .inspect(|parnet| {
9128 parnet.message();
9129 })
9130 .nth(offset)
9131 .expect("Not enough history");
9132 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9133 .expect("Could not reset");
9134}
9135
// Creates branch `name` pointing at the current HEAD commit, panicking on
// failure. Compiled out (`#[cfg(any())]`) along with the flaky test that uses it.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-pasted panic message: this creates a branch, it does not commit.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9146
// Points HEAD at the given ref (e.g. "refs/heads/main") and checks out its
// contents, panicking on failure. Compiled out (`#[cfg(any())]`) along with
// the flaky test that uses it.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9153
// Collects `repo`'s current status into a map from file path to its
// `git2::Status` flags. Compiled out (`#[cfg(any())]`) along with the flaky
// test that uses it.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}
9163
// Verifies `Project::find_project_path` resolution of absolute paths across
// multiple worktrees, including nonexistent files inside a worktree and paths
// outside any worktree.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at the root of the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        // A nested file resolves to a worktree-relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        // A file in the second worktree resolves to that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}