1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
9 DiffHunkStatusKind, assert_hunks,
10};
11use fs::FakeFs;
12use futures::{StreamExt, future};
13use git::{
14 GitHostingProviderRegistry,
15 repository::RepoPath,
16 status::{StatusCode, TrackedStatus},
17};
18use git2::RepositoryInitOptions;
19use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
20use http_client::Url;
21use itertools::Itertools;
22use language::{
23 Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, DiskState, FakeLspAdapter,
24 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
25 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
26 tree_sitter_rust, tree_sitter_typescript,
27};
28use lsp::{
29 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
30 WillRenameFiles, notification::DidRenameFiles,
31};
32use parking_lot::Mutex;
33use paths::{config_dir, tasks_file};
34use postage::stream::Stream as _;
35use pretty_assertions::{assert_eq, assert_matches};
36use rand::{Rng as _, rngs::StdRng};
37use serde_json::json;
38#[cfg(not(windows))]
39use std::os;
40use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
41use task::{ResolvedTask, TaskContext};
42use unindent::Unindent as _;
43use util::{
44 TryFutureExt as _, assert_set_eq, maybe, path,
45 paths::PathMatcher,
46 test::{TempTree, marked_text_offsets},
47 uri,
48};
49use worktree::WorktreeModelHandle as _;
50
51#[gpui::test]
52async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
53 cx.executor().allow_parking();
54
55 let (tx, mut rx) = futures::channel::mpsc::unbounded();
56 let _thread = std::thread::spawn(move || {
57 #[cfg(not(target_os = "windows"))]
58 std::fs::metadata("/tmp").unwrap();
59 #[cfg(target_os = "windows")]
60 std::fs::metadata("C:/Windows").unwrap();
61 std::thread::sleep(Duration::from_millis(1000));
62 tx.unbounded_send(1).unwrap();
63 });
64 rx.next().await.unwrap();
65}
66
67#[gpui::test]
68async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
69 cx.executor().allow_parking();
70
71 let io_task = smol::unblock(move || {
72 println!("sleeping on thread {:?}", std::thread::current().id());
73 std::thread::sleep(Duration::from_millis(10));
74 1
75 });
76
77 let task = cx.foreground_executor().spawn(async move {
78 io_task.await;
79 });
80
81 task.await;
82}
83
84#[cfg(not(windows))]
85#[gpui::test]
86async fn test_symlinks(cx: &mut gpui::TestAppContext) {
87 init_test(cx);
88 cx.executor().allow_parking();
89
90 let dir = TempTree::new(json!({
91 "root": {
92 "apple": "",
93 "banana": {
94 "carrot": {
95 "date": "",
96 "endive": "",
97 }
98 },
99 "fennel": {
100 "grape": "",
101 }
102 }
103 }));
104
105 let root_link_path = dir.path().join("root_link");
106 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
107 os::unix::fs::symlink(
108 dir.path().join("root/fennel"),
109 dir.path().join("root/finnochio"),
110 )
111 .unwrap();
112
113 let project = Project::test(
114 Arc::new(RealFs::new(None, cx.executor())),
115 [root_link_path.as_ref()],
116 cx,
117 )
118 .await;
119
120 project.update(cx, |project, cx| {
121 let tree = project.worktrees(cx).next().unwrap().read(cx);
122 assert_eq!(tree.file_count(), 5);
123 assert_eq!(
124 tree.inode_for_path("fennel/grape"),
125 tree.inode_for_path("finnochio/grape")
126 );
127 });
128}
129
130#[gpui::test]
131async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
132 init_test(cx);
133
134 let dir = TempTree::new(json!({
135 ".editorconfig": r#"
136 root = true
137 [*.rs]
138 indent_style = tab
139 indent_size = 3
140 end_of_line = lf
141 insert_final_newline = true
142 trim_trailing_whitespace = true
143 [*.js]
144 tab_width = 10
145 "#,
146 ".zed": {
147 "settings.json": r#"{
148 "tab_size": 8,
149 "hard_tabs": false,
150 "ensure_final_newline_on_save": false,
151 "remove_trailing_whitespace_on_save": false,
152 "soft_wrap": "editor_width"
153 }"#,
154 },
155 "a.rs": "fn a() {\n A\n}",
156 "b": {
157 ".editorconfig": r#"
158 [*.rs]
159 indent_size = 2
160 "#,
161 "b.rs": "fn b() {\n B\n}",
162 },
163 "c.js": "def c\n C\nend",
164 "README.json": "tabs are better\n",
165 }));
166
167 let path = dir.path();
168 let fs = FakeFs::new(cx.executor());
169 fs.insert_tree_from_real_fs(path, path).await;
170 let project = Project::test(fs, [path], cx).await;
171
172 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
173 language_registry.add(js_lang());
174 language_registry.add(json_lang());
175 language_registry.add(rust_lang());
176
177 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
178
179 cx.executor().run_until_parked();
180
181 cx.update(|cx| {
182 let tree = worktree.read(cx);
183 let settings_for = |path: &str| {
184 let file_entry = tree.entry_for_path(path).unwrap().clone();
185 let file = File::for_entry(file_entry, worktree.clone());
186 let file_language = project
187 .read(cx)
188 .languages()
189 .language_for_file_path(file.path.as_ref());
190 let file_language = cx
191 .background_executor()
192 .block(file_language)
193 .expect("Failed to get file language");
194 let file = file as _;
195 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
196 };
197
198 let settings_a = settings_for("a.rs");
199 let settings_b = settings_for("b/b.rs");
200 let settings_c = settings_for("c.js");
201 let settings_readme = settings_for("README.json");
202
203 // .editorconfig overrides .zed/settings
204 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
205 assert_eq!(settings_a.hard_tabs, true);
206 assert_eq!(settings_a.ensure_final_newline_on_save, true);
207 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
208
209 // .editorconfig in b/ overrides .editorconfig in root
210 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
211
212 // "indent_size" is not set, so "tab_width" is used
213 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
214
215 // README.md should not be affected by .editorconfig's globe "*.rs"
216 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
217 });
218}
219
220#[gpui::test]
221async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
222 init_test(cx);
223 cx.update(|cx| {
224 GitHostingProviderRegistry::default_global(cx);
225 git_hosting_providers::init(cx);
226 });
227
228 let fs = FakeFs::new(cx.executor());
229 let str_path = path!("/dir");
230 let path = Path::new(str_path);
231
232 fs.insert_tree(
233 path!("/dir"),
234 json!({
235 ".zed": {
236 "settings.json": r#"{
237 "git_hosting_providers": [
238 {
239 "provider": "gitlab",
240 "base_url": "https://google.com",
241 "name": "foo"
242 }
243 ]
244 }"#
245 },
246 }),
247 )
248 .await;
249
250 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
251 let (_worktree, _) =
252 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
253 cx.executor().run_until_parked();
254
255 cx.update(|cx| {
256 let provider = GitHostingProviderRegistry::global(cx);
257 assert!(
258 provider
259 .list_hosting_providers()
260 .into_iter()
261 .any(|provider| provider.name() == "foo")
262 );
263 });
264
265 fs.atomic_write(
266 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
267 "{}".into(),
268 )
269 .await
270 .unwrap();
271
272 cx.run_until_parked();
273
274 cx.update(|cx| {
275 let provider = GitHostingProviderRegistry::global(cx);
276 assert!(
277 !provider
278 .list_hosting_providers()
279 .into_iter()
280 .any(|provider| provider.name() == "foo")
281 );
282 });
283}
284
285#[gpui::test]
286async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
287 init_test(cx);
288 TaskStore::init(None);
289
290 let fs = FakeFs::new(cx.executor());
291 fs.insert_tree(
292 path!("/dir"),
293 json!({
294 ".zed": {
295 "settings.json": r#"{ "tab_size": 8 }"#,
296 "tasks.json": r#"[{
297 "label": "cargo check all",
298 "command": "cargo",
299 "args": ["check", "--all"]
300 },]"#,
301 },
302 "a": {
303 "a.rs": "fn a() {\n A\n}"
304 },
305 "b": {
306 ".zed": {
307 "settings.json": r#"{ "tab_size": 2 }"#,
308 "tasks.json": r#"[{
309 "label": "cargo check",
310 "command": "cargo",
311 "args": ["check"]
312 },]"#,
313 },
314 "b.rs": "fn b() {\n B\n}"
315 }
316 }),
317 )
318 .await;
319
320 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
321 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
322
323 cx.executor().run_until_parked();
324 let worktree_id = cx.update(|cx| {
325 project.update(cx, |project, cx| {
326 project.worktrees(cx).next().unwrap().read(cx).id()
327 })
328 });
329
330 let mut task_contexts = TaskContexts::default();
331 task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
332 let task_contexts = Arc::new(task_contexts);
333
334 let topmost_local_task_source_kind = TaskSourceKind::Worktree {
335 id: worktree_id,
336 directory_in_worktree: PathBuf::from(".zed"),
337 id_base: "local worktree tasks from directory \".zed\"".into(),
338 };
339
340 let all_tasks = cx
341 .update(|cx| {
342 let tree = worktree.read(cx);
343
344 let file_a = File::for_entry(
345 tree.entry_for_path("a/a.rs").unwrap().clone(),
346 worktree.clone(),
347 ) as _;
348 let settings_a = language_settings(None, Some(&file_a), cx);
349 let file_b = File::for_entry(
350 tree.entry_for_path("b/b.rs").unwrap().clone(),
351 worktree.clone(),
352 ) as _;
353 let settings_b = language_settings(None, Some(&file_b), cx);
354
355 assert_eq!(settings_a.tab_size.get(), 8);
356 assert_eq!(settings_b.tab_size.get(), 2);
357
358 get_all_tasks(&project, task_contexts.clone(), cx)
359 })
360 .await
361 .into_iter()
362 .map(|(source_kind, task)| {
363 let resolved = task.resolved;
364 (
365 source_kind,
366 task.resolved_label,
367 resolved.args,
368 resolved.env,
369 )
370 })
371 .collect::<Vec<_>>();
372 assert_eq!(
373 all_tasks,
374 vec![
375 (
376 TaskSourceKind::Worktree {
377 id: worktree_id,
378 directory_in_worktree: PathBuf::from(path!("b/.zed")),
379 id_base: if cfg!(windows) {
380 "local worktree tasks from directory \"b\\\\.zed\"".into()
381 } else {
382 "local worktree tasks from directory \"b/.zed\"".into()
383 },
384 },
385 "cargo check".to_string(),
386 vec!["check".to_string()],
387 HashMap::default(),
388 ),
389 (
390 topmost_local_task_source_kind.clone(),
391 "cargo check all".to_string(),
392 vec!["check".to_string(), "--all".to_string()],
393 HashMap::default(),
394 ),
395 ]
396 );
397
398 let (_, resolved_task) = cx
399 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
400 .await
401 .into_iter()
402 .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
403 .expect("should have one global task");
404 project.update(cx, |project, cx| {
405 let task_inventory = project
406 .task_store
407 .read(cx)
408 .task_inventory()
409 .cloned()
410 .unwrap();
411 task_inventory.update(cx, |inventory, _| {
412 inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
413 inventory
414 .update_file_based_tasks(
415 TaskSettingsLocation::Global(tasks_file()),
416 Some(
417 &json!([{
418 "label": "cargo check unstable",
419 "command": "cargo",
420 "args": [
421 "check",
422 "--all",
423 "--all-targets"
424 ],
425 "env": {
426 "RUSTFLAGS": "-Zunstable-options"
427 }
428 }])
429 .to_string(),
430 ),
431 )
432 .unwrap();
433 });
434 });
435 cx.run_until_parked();
436
437 let all_tasks = cx
438 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
439 .await
440 .into_iter()
441 .map(|(source_kind, task)| {
442 let resolved = task.resolved;
443 (
444 source_kind,
445 task.resolved_label,
446 resolved.args,
447 resolved.env,
448 )
449 })
450 .collect::<Vec<_>>();
451 assert_eq!(
452 all_tasks,
453 vec![
454 (
455 topmost_local_task_source_kind.clone(),
456 "cargo check all".to_string(),
457 vec!["check".to_string(), "--all".to_string()],
458 HashMap::default(),
459 ),
460 (
461 TaskSourceKind::Worktree {
462 id: worktree_id,
463 directory_in_worktree: PathBuf::from(path!("b/.zed")),
464 id_base: if cfg!(windows) {
465 "local worktree tasks from directory \"b\\\\.zed\"".into()
466 } else {
467 "local worktree tasks from directory \"b/.zed\"".into()
468 },
469 },
470 "cargo check".to_string(),
471 vec!["check".to_string()],
472 HashMap::default(),
473 ),
474 (
475 TaskSourceKind::AbsPath {
476 abs_path: paths::tasks_file().clone(),
477 id_base: "global tasks.json".into(),
478 },
479 "cargo check unstable".to_string(),
480 vec![
481 "check".to_string(),
482 "--all".to_string(),
483 "--all-targets".to_string(),
484 ],
485 HashMap::from_iter(Some((
486 "RUSTFLAGS".to_string(),
487 "-Zunstable-options".to_string()
488 ))),
489 ),
490 ]
491 );
492}
493
494#[gpui::test]
495async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
496 init_test(cx);
497 TaskStore::init(None);
498
499 let fs = FakeFs::new(cx.executor());
500 fs.insert_tree(
501 path!("/dir"),
502 json!({
503 ".zed": {
504 "tasks.json": r#"[{
505 "label": "test worktree root",
506 "command": "echo $ZED_WORKTREE_ROOT"
507 }]"#,
508 },
509 "a": {
510 "a.rs": "fn a() {\n A\n}"
511 },
512 }),
513 )
514 .await;
515
516 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
517 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
518
519 cx.executor().run_until_parked();
520 let worktree_id = cx.update(|cx| {
521 project.update(cx, |project, cx| {
522 project.worktrees(cx).next().unwrap().read(cx).id()
523 })
524 });
525
526 let active_non_worktree_item_tasks = cx
527 .update(|cx| {
528 get_all_tasks(
529 &project,
530 Arc::new(TaskContexts {
531 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
532 active_worktree_context: None,
533 other_worktree_contexts: Vec::new(),
534 lsp_task_sources: HashMap::default(),
535 latest_selection: None,
536 }),
537 cx,
538 )
539 })
540 .await;
541 assert!(
542 active_non_worktree_item_tasks.is_empty(),
543 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
544 );
545
546 let active_worktree_tasks = cx
547 .update(|cx| {
548 get_all_tasks(
549 &project,
550 Arc::new(TaskContexts {
551 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
552 active_worktree_context: Some((worktree_id, {
553 let mut worktree_context = TaskContext::default();
554 worktree_context
555 .task_variables
556 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
557 worktree_context
558 })),
559 other_worktree_contexts: Vec::new(),
560 lsp_task_sources: HashMap::default(),
561 latest_selection: None,
562 }),
563 cx,
564 )
565 })
566 .await;
567 assert_eq!(
568 active_worktree_tasks
569 .into_iter()
570 .map(|(source_kind, task)| {
571 let resolved = task.resolved;
572 (source_kind, resolved.command.unwrap())
573 })
574 .collect::<Vec<_>>(),
575 vec![(
576 TaskSourceKind::Worktree {
577 id: worktree_id,
578 directory_in_worktree: PathBuf::from(path!(".zed")),
579 id_base: if cfg!(windows) {
580 "local worktree tasks from directory \".zed\"".into()
581 } else {
582 "local worktree tasks from directory \".zed\"".into()
583 },
584 },
585 "echo /dir".to_string(),
586 )]
587 );
588}
589
// End-to-end coverage of language-server lifecycle management:
// - servers start lazily when a buffer with a matching language opens;
// - buffers are configured from the server's advertised capabilities;
// - edits, saves, renames, and closes are routed only to the appropriate servers;
// - renaming across language boundaries closes the document on the old server,
//   opens it on the new one, and clears stale diagnostics;
// - restarting servers re-opens the relevant documents on the fresh instances.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server: completion triggers "." and "::", save notifications supported.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server: completion trigger ":", save notifications supported.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // Only the Rust edit reaches the Rust server; the TOML edit goes nowhere.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-language rename surfaces as close-then-open on the same server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can later observe it being cleared when the
    // buffer changes language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers must receive a shutdown request before new ones start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (set comparison, because the reopen order is not guaranteed).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
992
993#[gpui::test]
994async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
995 init_test(cx);
996
997 let fs = FakeFs::new(cx.executor());
998 fs.insert_tree(
999 path!("/the-root"),
1000 json!({
1001 ".gitignore": "target\n",
1002 "Cargo.lock": "",
1003 "src": {
1004 "a.rs": "",
1005 "b.rs": "",
1006 },
1007 "target": {
1008 "x": {
1009 "out": {
1010 "x.rs": ""
1011 }
1012 },
1013 "y": {
1014 "out": {
1015 "y.rs": "",
1016 }
1017 },
1018 "z": {
1019 "out": {
1020 "z.rs": ""
1021 }
1022 }
1023 }
1024 }),
1025 )
1026 .await;
1027 fs.insert_tree(
1028 path!("/the-registry"),
1029 json!({
1030 "dep1": {
1031 "src": {
1032 "dep1.rs": "",
1033 }
1034 },
1035 "dep2": {
1036 "src": {
1037 "dep2.rs": "",
1038 }
1039 },
1040 }),
1041 )
1042 .await;
1043 fs.insert_tree(
1044 path!("/the/stdlib"),
1045 json!({
1046 "LICENSE": "",
1047 "src": {
1048 "string.rs": "",
1049 }
1050 }),
1051 )
1052 .await;
1053
1054 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1055 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1056 (project.languages().clone(), project.lsp_store())
1057 });
1058 language_registry.add(rust_lang());
1059 let mut fake_servers = language_registry.register_fake_lsp(
1060 "Rust",
1061 FakeLspAdapter {
1062 name: "the-language-server",
1063 ..Default::default()
1064 },
1065 );
1066
1067 cx.executor().run_until_parked();
1068
1069 // Start the language server by opening a buffer with a compatible file extension.
1070 project
1071 .update(cx, |project, cx| {
1072 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1073 })
1074 .await
1075 .unwrap();
1076
1077 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1078 project.update(cx, |project, cx| {
1079 let worktree = project.worktrees(cx).next().unwrap();
1080 assert_eq!(
1081 worktree
1082 .read(cx)
1083 .snapshot()
1084 .entries(true, 0)
1085 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1086 .collect::<Vec<_>>(),
1087 &[
1088 (Path::new(""), false),
1089 (Path::new(".gitignore"), false),
1090 (Path::new("Cargo.lock"), false),
1091 (Path::new("src"), false),
1092 (Path::new("src/a.rs"), false),
1093 (Path::new("src/b.rs"), false),
1094 (Path::new("target"), true),
1095 ]
1096 );
1097 });
1098
1099 let prev_read_dir_count = fs.read_dir_call_count();
1100
1101 let fake_server = fake_servers.next().await.unwrap();
1102 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1103 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1104 id
1105 });
1106
1107 // Simulate jumping to a definition in a dependency outside of the worktree.
1108 let _out_of_worktree_buffer = project
1109 .update(cx, |project, cx| {
1110 project.open_local_buffer_via_lsp(
1111 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1112 server_id,
1113 cx,
1114 )
1115 })
1116 .await
1117 .unwrap();
1118
1119 // Keep track of the FS events reported to the language server.
1120 let file_changes = Arc::new(Mutex::new(Vec::new()));
1121 fake_server
1122 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1123 registrations: vec![lsp::Registration {
1124 id: Default::default(),
1125 method: "workspace/didChangeWatchedFiles".to_string(),
1126 register_options: serde_json::to_value(
1127 lsp::DidChangeWatchedFilesRegistrationOptions {
1128 watchers: vec![
1129 lsp::FileSystemWatcher {
1130 glob_pattern: lsp::GlobPattern::String(
1131 path!("/the-root/Cargo.toml").to_string(),
1132 ),
1133 kind: None,
1134 },
1135 lsp::FileSystemWatcher {
1136 glob_pattern: lsp::GlobPattern::String(
1137 path!("/the-root/src/*.{rs,c}").to_string(),
1138 ),
1139 kind: None,
1140 },
1141 lsp::FileSystemWatcher {
1142 glob_pattern: lsp::GlobPattern::String(
1143 path!("/the-root/target/y/**/*.rs").to_string(),
1144 ),
1145 kind: None,
1146 },
1147 lsp::FileSystemWatcher {
1148 glob_pattern: lsp::GlobPattern::String(
1149 path!("/the/stdlib/src/**/*.rs").to_string(),
1150 ),
1151 kind: None,
1152 },
1153 lsp::FileSystemWatcher {
1154 glob_pattern: lsp::GlobPattern::String(
1155 path!("**/Cargo.lock").to_string(),
1156 ),
1157 kind: None,
1158 },
1159 ],
1160 },
1161 )
1162 .ok(),
1163 }],
1164 })
1165 .await
1166 .into_response()
1167 .unwrap();
1168 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1169 let file_changes = file_changes.clone();
1170 move |params, _| {
1171 let mut file_changes = file_changes.lock();
1172 file_changes.extend(params.changes);
1173 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1174 }
1175 });
1176
1177 cx.executor().run_until_parked();
1178 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1179 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1180
1181 let mut new_watched_paths = fs.watched_paths();
1182 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1183 assert_eq!(
1184 &new_watched_paths,
1185 &[
1186 Path::new(path!("/the-root")),
1187 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1188 Path::new(path!("/the/stdlib/src"))
1189 ]
1190 );
1191
1192 // Now the language server has asked us to watch an ignored directory path,
1193 // so we recursively load it.
1194 project.update(cx, |project, cx| {
1195 let worktree = project.visible_worktrees(cx).next().unwrap();
1196 assert_eq!(
1197 worktree
1198 .read(cx)
1199 .snapshot()
1200 .entries(true, 0)
1201 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1202 .collect::<Vec<_>>(),
1203 &[
1204 (Path::new(""), false),
1205 (Path::new(".gitignore"), false),
1206 (Path::new("Cargo.lock"), false),
1207 (Path::new("src"), false),
1208 (Path::new("src/a.rs"), false),
1209 (Path::new("src/b.rs"), false),
1210 (Path::new("target"), true),
1211 (Path::new("target/x"), true),
1212 (Path::new("target/y"), true),
1213 (Path::new("target/y/out"), true),
1214 (Path::new("target/y/out/y.rs"), true),
1215 (Path::new("target/z"), true),
1216 ]
1217 );
1218 });
1219
1220 // Perform some file system mutations, two of which match the watched patterns,
1221 // and one of which does not.
1222 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1223 .await
1224 .unwrap();
1225 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1226 .await
1227 .unwrap();
1228 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1229 .await
1230 .unwrap();
1231 fs.create_file(
1232 path!("/the-root/target/x/out/x2.rs").as_ref(),
1233 Default::default(),
1234 )
1235 .await
1236 .unwrap();
1237 fs.create_file(
1238 path!("/the-root/target/y/out/y2.rs").as_ref(),
1239 Default::default(),
1240 )
1241 .await
1242 .unwrap();
1243 fs.save(
1244 path!("/the-root/Cargo.lock").as_ref(),
1245 &"".into(),
1246 Default::default(),
1247 )
1248 .await
1249 .unwrap();
1250 fs.save(
1251 path!("/the-stdlib/LICENSE").as_ref(),
1252 &"".into(),
1253 Default::default(),
1254 )
1255 .await
1256 .unwrap();
1257 fs.save(
1258 path!("/the/stdlib/src/string.rs").as_ref(),
1259 &"".into(),
1260 Default::default(),
1261 )
1262 .await
1263 .unwrap();
1264
1265 // The language server receives events for the FS mutations that match its watch patterns.
1266 cx.executor().run_until_parked();
1267 assert_eq!(
1268 &*file_changes.lock(),
1269 &[
1270 lsp::FileEvent {
1271 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1272 typ: lsp::FileChangeType::CHANGED,
1273 },
1274 lsp::FileEvent {
1275 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1276 typ: lsp::FileChangeType::DELETED,
1277 },
1278 lsp::FileEvent {
1279 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1280 typ: lsp::FileChangeType::CREATED,
1281 },
1282 lsp::FileEvent {
1283 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1284 typ: lsp::FileChangeType::CREATED,
1285 },
1286 lsp::FileEvent {
1287 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1288 typ: lsp::FileChangeType::CHANGED,
1289 },
1290 ]
1291 );
1292}
1293
1294#[gpui::test]
1295async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1296 init_test(cx);
1297
1298 let fs = FakeFs::new(cx.executor());
1299 fs.insert_tree(
1300 path!("/dir"),
1301 json!({
1302 "a.rs": "let a = 1;",
1303 "b.rs": "let b = 2;"
1304 }),
1305 )
1306 .await;
1307
1308 let project = Project::test(
1309 fs,
1310 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1311 cx,
1312 )
1313 .await;
1314 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1315
1316 let buffer_a = project
1317 .update(cx, |project, cx| {
1318 project.open_local_buffer(path!("/dir/a.rs"), cx)
1319 })
1320 .await
1321 .unwrap();
1322 let buffer_b = project
1323 .update(cx, |project, cx| {
1324 project.open_local_buffer(path!("/dir/b.rs"), cx)
1325 })
1326 .await
1327 .unwrap();
1328
1329 lsp_store.update(cx, |lsp_store, cx| {
1330 lsp_store
1331 .update_diagnostics(
1332 LanguageServerId(0),
1333 lsp::PublishDiagnosticsParams {
1334 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1335 version: None,
1336 diagnostics: vec![lsp::Diagnostic {
1337 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1338 severity: Some(lsp::DiagnosticSeverity::ERROR),
1339 message: "error 1".to_string(),
1340 ..Default::default()
1341 }],
1342 },
1343 None,
1344 DiagnosticSourceKind::Pushed,
1345 &[],
1346 cx,
1347 )
1348 .unwrap();
1349 lsp_store
1350 .update_diagnostics(
1351 LanguageServerId(0),
1352 lsp::PublishDiagnosticsParams {
1353 uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
1354 version: None,
1355 diagnostics: vec![lsp::Diagnostic {
1356 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1357 severity: Some(DiagnosticSeverity::WARNING),
1358 message: "error 2".to_string(),
1359 ..Default::default()
1360 }],
1361 },
1362 None,
1363 DiagnosticSourceKind::Pushed,
1364 &[],
1365 cx,
1366 )
1367 .unwrap();
1368 });
1369
1370 buffer_a.update(cx, |buffer, _| {
1371 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1372 assert_eq!(
1373 chunks
1374 .iter()
1375 .map(|(s, d)| (s.as_str(), *d))
1376 .collect::<Vec<_>>(),
1377 &[
1378 ("let ", None),
1379 ("a", Some(DiagnosticSeverity::ERROR)),
1380 (" = 1;", None),
1381 ]
1382 );
1383 });
1384 buffer_b.update(cx, |buffer, _| {
1385 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1386 assert_eq!(
1387 chunks
1388 .iter()
1389 .map(|(s, d)| (s.as_str(), *d))
1390 .collect::<Vec<_>>(),
1391 &[
1392 ("let ", None),
1393 ("b", Some(DiagnosticSeverity::WARNING)),
1394 (" = 2;", None),
1395 ]
1396 );
1397 });
1398}
1399
1400#[gpui::test]
1401async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1402 init_test(cx);
1403
1404 let fs = FakeFs::new(cx.executor());
1405 fs.insert_tree(
1406 path!("/root"),
1407 json!({
1408 "dir": {
1409 ".git": {
1410 "HEAD": "ref: refs/heads/main",
1411 },
1412 ".gitignore": "b.rs",
1413 "a.rs": "let a = 1;",
1414 "b.rs": "let b = 2;",
1415 },
1416 "other.rs": "let b = c;"
1417 }),
1418 )
1419 .await;
1420
1421 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1422 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1423 let (worktree, _) = project
1424 .update(cx, |project, cx| {
1425 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1426 })
1427 .await
1428 .unwrap();
1429 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1430
1431 let (worktree, _) = project
1432 .update(cx, |project, cx| {
1433 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1434 })
1435 .await
1436 .unwrap();
1437 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1438
1439 let server_id = LanguageServerId(0);
1440 lsp_store.update(cx, |lsp_store, cx| {
1441 lsp_store
1442 .update_diagnostics(
1443 server_id,
1444 lsp::PublishDiagnosticsParams {
1445 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1446 version: None,
1447 diagnostics: vec![lsp::Diagnostic {
1448 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1449 severity: Some(lsp::DiagnosticSeverity::ERROR),
1450 message: "unused variable 'b'".to_string(),
1451 ..Default::default()
1452 }],
1453 },
1454 None,
1455 DiagnosticSourceKind::Pushed,
1456 &[],
1457 cx,
1458 )
1459 .unwrap();
1460 lsp_store
1461 .update_diagnostics(
1462 server_id,
1463 lsp::PublishDiagnosticsParams {
1464 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1465 version: None,
1466 diagnostics: vec![lsp::Diagnostic {
1467 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1468 severity: Some(lsp::DiagnosticSeverity::ERROR),
1469 message: "unknown variable 'c'".to_string(),
1470 ..Default::default()
1471 }],
1472 },
1473 None,
1474 DiagnosticSourceKind::Pushed,
1475 &[],
1476 cx,
1477 )
1478 .unwrap();
1479 });
1480
1481 let main_ignored_buffer = project
1482 .update(cx, |project, cx| {
1483 project.open_buffer((main_worktree_id, "b.rs"), cx)
1484 })
1485 .await
1486 .unwrap();
1487 main_ignored_buffer.update(cx, |buffer, _| {
1488 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1489 assert_eq!(
1490 chunks
1491 .iter()
1492 .map(|(s, d)| (s.as_str(), *d))
1493 .collect::<Vec<_>>(),
1494 &[
1495 ("let ", None),
1496 ("b", Some(DiagnosticSeverity::ERROR)),
1497 (" = 2;", None),
1498 ],
1499 "Gigitnored buffers should still get in-buffer diagnostics",
1500 );
1501 });
1502 let other_buffer = project
1503 .update(cx, |project, cx| {
1504 project.open_buffer((other_worktree_id, ""), cx)
1505 })
1506 .await
1507 .unwrap();
1508 other_buffer.update(cx, |buffer, _| {
1509 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1510 assert_eq!(
1511 chunks
1512 .iter()
1513 .map(|(s, d)| (s.as_str(), *d))
1514 .collect::<Vec<_>>(),
1515 &[
1516 ("let b = ", None),
1517 ("c", Some(DiagnosticSeverity::ERROR)),
1518 (";", None),
1519 ],
1520 "Buffers from hidden projects should still get in-buffer diagnostics"
1521 );
1522 });
1523
1524 project.update(cx, |project, cx| {
1525 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1526 assert_eq!(
1527 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1528 vec![(
1529 ProjectPath {
1530 worktree_id: main_worktree_id,
1531 path: Arc::from(Path::new("b.rs")),
1532 },
1533 server_id,
1534 DiagnosticSummary {
1535 error_count: 1,
1536 warning_count: 0,
1537 }
1538 )]
1539 );
1540 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1541 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1542 });
1543}
1544
// Verifies the project event stream around disk-based diagnostics: the
// configured progress token drives DiskBasedDiagnosticsStarted/Finished
// events, published diagnostics yield DiagnosticsUpdated, and re-publishing
// identical (empty) diagnostics does not emit a duplicate event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The fake adapter marks `progress_token` as the disk-based diagnostics
    // token, so progress on it maps to the DiskBasedDiagnostics* events.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the disk-based token should emit a
    // DiskBasedDiagnosticsStarted event (after the inlay-hint refresh).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic for a.rs yields a DiagnosticsUpdated event
    // referencing that path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, Path::new("a.rs")).into()],
        }
    );

    // Ending progress emits DiskBasedDiagnosticsFinished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The buffer opened after the publish already carries the diagnostic.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, Path::new("a.rs")).into()],
        }
    );

    // The second identical (empty) publish must produce no further event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1681
// Restarting a language server while its disk-based diagnostics are still
// in flight must not leave the project stuck in the "diagnostics running"
// state: only the new server instance's progress should count.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Expected sequence: the old server (id 0) is removed, the new one
    // (id 1) is added, inlay hints refresh, the buffer is registered with
    // the new server, then disk-based diagnostics start.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server instance should be reported as running
    // disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1781
1782#[gpui::test]
1783async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1784 init_test(cx);
1785
1786 let fs = FakeFs::new(cx.executor());
1787 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
1788
1789 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1790
1791 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1792 language_registry.add(rust_lang());
1793 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1794
1795 let (buffer, _) = project
1796 .update(cx, |project, cx| {
1797 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1798 })
1799 .await
1800 .unwrap();
1801
1802 // Publish diagnostics
1803 let fake_server = fake_servers.next().await.unwrap();
1804 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1805 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1806 version: None,
1807 diagnostics: vec![lsp::Diagnostic {
1808 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1809 severity: Some(lsp::DiagnosticSeverity::ERROR),
1810 message: "the message".to_string(),
1811 ..Default::default()
1812 }],
1813 });
1814
1815 cx.executor().run_until_parked();
1816 buffer.update(cx, |buffer, _| {
1817 assert_eq!(
1818 buffer
1819 .snapshot()
1820 .diagnostics_in_range::<_, usize>(0..1, false)
1821 .map(|entry| entry.diagnostic.message.clone())
1822 .collect::<Vec<_>>(),
1823 ["the message".to_string()]
1824 );
1825 });
1826 project.update(cx, |project, cx| {
1827 assert_eq!(
1828 project.diagnostic_summary(false, cx),
1829 DiagnosticSummary {
1830 error_count: 1,
1831 warning_count: 0,
1832 }
1833 );
1834 });
1835
1836 project.update(cx, |project, cx| {
1837 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
1838 });
1839
1840 // The diagnostics are cleared.
1841 cx.executor().run_until_parked();
1842 buffer.update(cx, |buffer, _| {
1843 assert_eq!(
1844 buffer
1845 .snapshot()
1846 .diagnostics_in_range::<_, usize>(0..1, false)
1847 .map(|entry| entry.diagnostic.message.clone())
1848 .collect::<Vec<_>>(),
1849 Vec::<String>::new(),
1850 );
1851 });
1852 project.update(cx, |project, cx| {
1853 assert_eq!(
1854 project.diagnostic_summary(false, cx),
1855 DiagnosticSummary {
1856 error_count: 0,
1857 warning_count: 0,
1858 }
1859 );
1860 });
1861}
1862
1863#[gpui::test]
1864async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1865 init_test(cx);
1866
1867 let fs = FakeFs::new(cx.executor());
1868 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1869
1870 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1871 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1872
1873 language_registry.add(rust_lang());
1874 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1875
1876 let (buffer, _handle) = project
1877 .update(cx, |project, cx| {
1878 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1879 })
1880 .await
1881 .unwrap();
1882
1883 // Before restarting the server, report diagnostics with an unknown buffer version.
1884 let fake_server = fake_servers.next().await.unwrap();
1885 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1886 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1887 version: Some(10000),
1888 diagnostics: Vec::new(),
1889 });
1890 cx.executor().run_until_parked();
1891 project.update(cx, |project, cx| {
1892 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
1893 });
1894
1895 let mut fake_server = fake_servers.next().await.unwrap();
1896 let notification = fake_server
1897 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1898 .await
1899 .text_document;
1900 assert_eq!(notification.version, 0);
1901}
1902
1903#[gpui::test]
1904async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1905 init_test(cx);
1906
1907 let progress_token = "the-progress-token";
1908
1909 let fs = FakeFs::new(cx.executor());
1910 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1911
1912 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1913
1914 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1915 language_registry.add(rust_lang());
1916 let mut fake_servers = language_registry.register_fake_lsp(
1917 "Rust",
1918 FakeLspAdapter {
1919 name: "the-language-server",
1920 disk_based_diagnostics_sources: vec!["disk".into()],
1921 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1922 ..Default::default()
1923 },
1924 );
1925
1926 let (buffer, _handle) = project
1927 .update(cx, |project, cx| {
1928 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1929 })
1930 .await
1931 .unwrap();
1932
1933 // Simulate diagnostics starting to update.
1934 let mut fake_server = fake_servers.next().await.unwrap();
1935 fake_server
1936 .start_progress_with(
1937 "another-token",
1938 lsp::WorkDoneProgressBegin {
1939 cancellable: Some(false),
1940 ..Default::default()
1941 },
1942 )
1943 .await;
1944 fake_server
1945 .start_progress_with(
1946 progress_token,
1947 lsp::WorkDoneProgressBegin {
1948 cancellable: Some(true),
1949 ..Default::default()
1950 },
1951 )
1952 .await;
1953 cx.executor().run_until_parked();
1954
1955 project.update(cx, |project, cx| {
1956 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1957 });
1958
1959 let cancel_notification = fake_server
1960 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1961 .await;
1962 assert_eq!(
1963 cancel_notification.token,
1964 NumberOrString::String(progress_token.into())
1965 );
1966}
1967
// Toggling `enable_language_server` per language in the settings should
// stop only the affected server, and re-enabling should start a fresh
// instance that re-opens the relevant buffers.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // One fake server per language so each can be observed independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.0.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.0.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.0.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The new Rust server instance re-opens the Rust buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JavaScript server shuts down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2085
2086#[gpui::test(iterations = 3)]
2087async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
2088 init_test(cx);
2089
2090 let text = "
2091 fn a() { A }
2092 fn b() { BB }
2093 fn c() { CCC }
2094 "
2095 .unindent();
2096
2097 let fs = FakeFs::new(cx.executor());
2098 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2099
2100 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2101 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2102
2103 language_registry.add(rust_lang());
2104 let mut fake_servers = language_registry.register_fake_lsp(
2105 "Rust",
2106 FakeLspAdapter {
2107 disk_based_diagnostics_sources: vec!["disk".into()],
2108 ..Default::default()
2109 },
2110 );
2111
2112 let buffer = project
2113 .update(cx, |project, cx| {
2114 project.open_local_buffer(path!("/dir/a.rs"), cx)
2115 })
2116 .await
2117 .unwrap();
2118
2119 let _handle = project.update(cx, |project, cx| {
2120 project.register_buffer_with_language_servers(&buffer, cx)
2121 });
2122
2123 let mut fake_server = fake_servers.next().await.unwrap();
2124 let open_notification = fake_server
2125 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2126 .await;
2127
2128 // Edit the buffer, moving the content down
2129 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2130 let change_notification_1 = fake_server
2131 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2132 .await;
2133 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2134
2135 // Report some diagnostics for the initial version of the buffer
2136 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2137 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2138 version: Some(open_notification.text_document.version),
2139 diagnostics: vec![
2140 lsp::Diagnostic {
2141 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2142 severity: Some(DiagnosticSeverity::ERROR),
2143 message: "undefined variable 'A'".to_string(),
2144 source: Some("disk".to_string()),
2145 ..Default::default()
2146 },
2147 lsp::Diagnostic {
2148 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2149 severity: Some(DiagnosticSeverity::ERROR),
2150 message: "undefined variable 'BB'".to_string(),
2151 source: Some("disk".to_string()),
2152 ..Default::default()
2153 },
2154 lsp::Diagnostic {
2155 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2156 severity: Some(DiagnosticSeverity::ERROR),
2157 source: Some("disk".to_string()),
2158 message: "undefined variable 'CCC'".to_string(),
2159 ..Default::default()
2160 },
2161 ],
2162 });
2163
2164 // The diagnostics have moved down since they were created.
2165 cx.executor().run_until_parked();
2166 buffer.update(cx, |buffer, _| {
2167 assert_eq!(
2168 buffer
2169 .snapshot()
2170 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2171 .collect::<Vec<_>>(),
2172 &[
2173 DiagnosticEntry {
2174 range: Point::new(3, 9)..Point::new(3, 11),
2175 diagnostic: Diagnostic {
2176 source: Some("disk".into()),
2177 severity: DiagnosticSeverity::ERROR,
2178 message: "undefined variable 'BB'".to_string(),
2179 is_disk_based: true,
2180 group_id: 1,
2181 is_primary: true,
2182 source_kind: DiagnosticSourceKind::Pushed,
2183 ..Diagnostic::default()
2184 },
2185 },
2186 DiagnosticEntry {
2187 range: Point::new(4, 9)..Point::new(4, 12),
2188 diagnostic: Diagnostic {
2189 source: Some("disk".into()),
2190 severity: DiagnosticSeverity::ERROR,
2191 message: "undefined variable 'CCC'".to_string(),
2192 is_disk_based: true,
2193 group_id: 2,
2194 is_primary: true,
2195 source_kind: DiagnosticSourceKind::Pushed,
2196 ..Diagnostic::default()
2197 }
2198 }
2199 ]
2200 );
2201 assert_eq!(
2202 chunks_with_diagnostics(buffer, 0..buffer.len()),
2203 [
2204 ("\n\nfn a() { ".to_string(), None),
2205 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2206 (" }\nfn b() { ".to_string(), None),
2207 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2208 (" }\nfn c() { ".to_string(), None),
2209 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2210 (" }\n".to_string(), None),
2211 ]
2212 );
2213 assert_eq!(
2214 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2215 [
2216 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2217 (" }\nfn c() { ".to_string(), None),
2218 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2219 ]
2220 );
2221 });
2222
2223 // Ensure overlapping diagnostics are highlighted correctly.
2224 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2225 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2226 version: Some(open_notification.text_document.version),
2227 diagnostics: vec![
2228 lsp::Diagnostic {
2229 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2230 severity: Some(DiagnosticSeverity::ERROR),
2231 message: "undefined variable 'A'".to_string(),
2232 source: Some("disk".to_string()),
2233 ..Default::default()
2234 },
2235 lsp::Diagnostic {
2236 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2237 severity: Some(DiagnosticSeverity::WARNING),
2238 message: "unreachable statement".to_string(),
2239 source: Some("disk".to_string()),
2240 ..Default::default()
2241 },
2242 ],
2243 });
2244
2245 cx.executor().run_until_parked();
2246 buffer.update(cx, |buffer, _| {
2247 assert_eq!(
2248 buffer
2249 .snapshot()
2250 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2251 .collect::<Vec<_>>(),
2252 &[
2253 DiagnosticEntry {
2254 range: Point::new(2, 9)..Point::new(2, 12),
2255 diagnostic: Diagnostic {
2256 source: Some("disk".into()),
2257 severity: DiagnosticSeverity::WARNING,
2258 message: "unreachable statement".to_string(),
2259 is_disk_based: true,
2260 group_id: 4,
2261 is_primary: true,
2262 source_kind: DiagnosticSourceKind::Pushed,
2263 ..Diagnostic::default()
2264 }
2265 },
2266 DiagnosticEntry {
2267 range: Point::new(2, 9)..Point::new(2, 10),
2268 diagnostic: Diagnostic {
2269 source: Some("disk".into()),
2270 severity: DiagnosticSeverity::ERROR,
2271 message: "undefined variable 'A'".to_string(),
2272 is_disk_based: true,
2273 group_id: 3,
2274 is_primary: true,
2275 source_kind: DiagnosticSourceKind::Pushed,
2276 ..Diagnostic::default()
2277 },
2278 }
2279 ]
2280 );
2281 assert_eq!(
2282 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2283 [
2284 ("fn a() { ".to_string(), None),
2285 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2286 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2287 ("\n".to_string(), None),
2288 ]
2289 );
2290 assert_eq!(
2291 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2292 [
2293 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2294 ("\n".to_string(), None),
2295 ]
2296 );
2297 });
2298
2299 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2300 // changes since the last save.
2301 buffer.update(cx, |buffer, cx| {
2302 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2303 buffer.edit(
2304 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2305 None,
2306 cx,
2307 );
2308 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2309 });
2310 let change_notification_2 = fake_server
2311 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2312 .await;
2313 assert!(
2314 change_notification_2.text_document.version > change_notification_1.text_document.version
2315 );
2316
2317 // Handle out-of-order diagnostics
2318 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2319 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2320 version: Some(change_notification_2.text_document.version),
2321 diagnostics: vec![
2322 lsp::Diagnostic {
2323 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2324 severity: Some(DiagnosticSeverity::ERROR),
2325 message: "undefined variable 'BB'".to_string(),
2326 source: Some("disk".to_string()),
2327 ..Default::default()
2328 },
2329 lsp::Diagnostic {
2330 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2331 severity: Some(DiagnosticSeverity::WARNING),
2332 message: "undefined variable 'A'".to_string(),
2333 source: Some("disk".to_string()),
2334 ..Default::default()
2335 },
2336 ],
2337 });
2338
2339 cx.executor().run_until_parked();
2340 buffer.update(cx, |buffer, _| {
2341 assert_eq!(
2342 buffer
2343 .snapshot()
2344 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2345 .collect::<Vec<_>>(),
2346 &[
2347 DiagnosticEntry {
2348 range: Point::new(2, 21)..Point::new(2, 22),
2349 diagnostic: Diagnostic {
2350 source: Some("disk".into()),
2351 severity: DiagnosticSeverity::WARNING,
2352 message: "undefined variable 'A'".to_string(),
2353 is_disk_based: true,
2354 group_id: 6,
2355 is_primary: true,
2356 source_kind: DiagnosticSourceKind::Pushed,
2357 ..Diagnostic::default()
2358 }
2359 },
2360 DiagnosticEntry {
2361 range: Point::new(3, 9)..Point::new(3, 14),
2362 diagnostic: Diagnostic {
2363 source: Some("disk".into()),
2364 severity: DiagnosticSeverity::ERROR,
2365 message: "undefined variable 'BB'".to_string(),
2366 is_disk_based: true,
2367 group_id: 5,
2368 is_primary: true,
2369 source_kind: DiagnosticSourceKind::Pushed,
2370 ..Diagnostic::default()
2371 },
2372 }
2373 ]
2374 );
2375 });
2376}
2377
2378#[gpui::test]
2379async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2380 init_test(cx);
2381
2382 let text = concat!(
2383 "let one = ;\n", //
2384 "let two = \n",
2385 "let three = 3;\n",
2386 );
2387
2388 let fs = FakeFs::new(cx.executor());
2389 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2390
2391 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2392 let buffer = project
2393 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2394 .await
2395 .unwrap();
2396
2397 project.update(cx, |project, cx| {
2398 project.lsp_store.update(cx, |lsp_store, cx| {
2399 lsp_store
2400 .update_diagnostic_entries(
2401 LanguageServerId(0),
2402 PathBuf::from("/dir/a.rs"),
2403 None,
2404 None,
2405 vec![
2406 DiagnosticEntry {
2407 range: Unclipped(PointUtf16::new(0, 10))
2408 ..Unclipped(PointUtf16::new(0, 10)),
2409 diagnostic: Diagnostic {
2410 severity: DiagnosticSeverity::ERROR,
2411 message: "syntax error 1".to_string(),
2412 source_kind: DiagnosticSourceKind::Pushed,
2413 ..Diagnostic::default()
2414 },
2415 },
2416 DiagnosticEntry {
2417 range: Unclipped(PointUtf16::new(1, 10))
2418 ..Unclipped(PointUtf16::new(1, 10)),
2419 diagnostic: Diagnostic {
2420 severity: DiagnosticSeverity::ERROR,
2421 message: "syntax error 2".to_string(),
2422 source_kind: DiagnosticSourceKind::Pushed,
2423 ..Diagnostic::default()
2424 },
2425 },
2426 ],
2427 cx,
2428 )
2429 .unwrap();
2430 })
2431 });
2432
2433 // An empty range is extended forward to include the following character.
2434 // At the end of a line, an empty range is extended backward to include
2435 // the preceding character.
2436 buffer.update(cx, |buffer, _| {
2437 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2438 assert_eq!(
2439 chunks
2440 .iter()
2441 .map(|(s, d)| (s.as_str(), *d))
2442 .collect::<Vec<_>>(),
2443 &[
2444 ("let one = ", None),
2445 (";", Some(DiagnosticSeverity::ERROR)),
2446 ("\nlet two =", None),
2447 (" ", Some(DiagnosticSeverity::ERROR)),
2448 ("\nlet three = 3;\n", None)
2449 ]
2450 );
2451 });
2452}
2453
2454#[gpui::test]
2455async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2456 init_test(cx);
2457
2458 let fs = FakeFs::new(cx.executor());
2459 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2460 .await;
2461
2462 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2463 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2464
2465 lsp_store.update(cx, |lsp_store, cx| {
2466 lsp_store
2467 .update_diagnostic_entries(
2468 LanguageServerId(0),
2469 Path::new("/dir/a.rs").to_owned(),
2470 None,
2471 None,
2472 vec![DiagnosticEntry {
2473 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2474 diagnostic: Diagnostic {
2475 severity: DiagnosticSeverity::ERROR,
2476 is_primary: true,
2477 message: "syntax error a1".to_string(),
2478 source_kind: DiagnosticSourceKind::Pushed,
2479 ..Diagnostic::default()
2480 },
2481 }],
2482 cx,
2483 )
2484 .unwrap();
2485 lsp_store
2486 .update_diagnostic_entries(
2487 LanguageServerId(1),
2488 Path::new("/dir/a.rs").to_owned(),
2489 None,
2490 None,
2491 vec![DiagnosticEntry {
2492 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2493 diagnostic: Diagnostic {
2494 severity: DiagnosticSeverity::ERROR,
2495 is_primary: true,
2496 message: "syntax error b1".to_string(),
2497 source_kind: DiagnosticSourceKind::Pushed,
2498 ..Diagnostic::default()
2499 },
2500 }],
2501 cx,
2502 )
2503 .unwrap();
2504
2505 assert_eq!(
2506 lsp_store.diagnostic_summary(false, cx),
2507 DiagnosticSummary {
2508 error_count: 2,
2509 warning_count: 0,
2510 }
2511 );
2512 });
2513}
2514
// Verifies that LSP edits computed against an older document version are
// correctly rebased onto the current buffer contents: the buffer is edited
// after the server snapshots the document, and the server's edits (tagged
// with the old version) must still land in the right places.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the edits
    // below will be expressed against this (soon-to-be-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Interpret the server's edits relative to `lsp_document_version`, i.e.
    // the coordinates below refer to the ORIGINAL text, not the edited one.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits must preserve the user's interim edits
    // (comments) while performing the server's renames (f1->f10, etc.).
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2669
// Verifies that a large "rewrite most of the file" style LSP edit (as
// rust-analyzer produces for merge-imports) is minimized: `edits_from_lsp`
// should reduce it to the small set of actual changes.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Convert anchor ranges to points so the expected edits can be
        // compared literally.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four raw LSP edits must collapse to just two minimal edits:
        // rewriting the first import path and deleting the second import line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2780
// Verifies that a spec-violating edit pair — an insertion listed AFTER a
// replacement that starts at the same position — is still applied sensibly:
// the insertion ends up before the replaced text.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replacement covering columns 0..4 ("Path").
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    // Zero-width insertion at column 0, listed second.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The import line must precede the (unchanged) call expression.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
2836
// Verifies that malformed LSP edits — unordered, inverted, or pointing past
// the end of the file — are normalized into a valid, minimal edit set.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start column 8 comes after end column 4.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Convert anchor ranges to points for literal comparison below.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The invalid inputs must be normalized down to two well-formed edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2943
2944fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2945 buffer: &Buffer,
2946 range: Range<T>,
2947) -> Vec<(String, Option<DiagnosticSeverity>)> {
2948 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2949 for chunk in buffer.snapshot().chunks(range, true) {
2950 if chunks.last().map_or(false, |prev_chunk| {
2951 prev_chunk.1 == chunk.diagnostic_severity
2952 }) {
2953 chunks.last_mut().unwrap().0.push_str(chunk.text);
2954 } else {
2955 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2956 }
2957 }
2958 chunks
2959}
2960
// Verifies go-to-definition across files: the target buffer outside the
// project's visible worktree is opened via an invisible worktree, which is
// released again once the definition result is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs will be reached via the
    // definition response.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server checks the request's document/position and responds
    // with a location inside a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, a.rs exists as an invisible
        // worktree alongside the visible b.rs worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: enumerate the project's worktrees as (abs_path, is_visible).
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3058
// Verifies that when a completion item carries an explicit `text_edit`, its
// range and new text are used verbatim, taking precedence over both
// `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the completion request first; the handler below answers it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with an item whose text_edit replaces the trailing "fqn".
    // `.next().await` waits until the fake server has handled the request.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // Both the replacement text and the replaced range come from text_edit.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3141
// Verifies the fallback chain when completion items lack a `text_edit` but
// the response supplies a default `edit_range`: the default range is used,
// and the inserted text comes from `insert_text`, or failing that, `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Issue the request first; the handler below answers it.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text supplies the text; the default edit_range supplies
        // the replaced span.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With neither text_edit nor insert_text, the label is inserted.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3277
// Verifies completions when neither an item-level `text_edit` nor a default
// `edit_range` is provided: the replaced range must be inferred from the
// text around the cursor (e.g. the word being typed).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the request first; the handler below answers it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The inferred range covers the trailing "fqn" before the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor is just before the closing quote, inside the string literal.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // Label text is inserted; the inferred range covers "cmp" before the
    // closing quote.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3383
// Verifies that carriage returns in a completion's insert_text ("\r" and
// "\r\n") are normalized to plain "\n" in the resulting completion text.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the request first; the handler below answers it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert_text mixes a bare "\r" and a Windows "\r\n".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    // Both line-ending forms are normalized to "\n".
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3451
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // End-to-end test of command-backed code actions: the server returns an
    // action with no edits, resolving it attaches a command, and executing
    // that command makes the server deliver the real edits via a
    // `workspace/applyEdit` request back to the editor.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Advertise code-action resolution plus a single executable command.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    // Opaque `data` round-tripped to the server during resolve.
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    // Insert "X" at the very start of a.ts.
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable, restoring the original file contents.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3593
3594#[gpui::test(iterations = 10)]
3595async fn test_save_file(cx: &mut gpui::TestAppContext) {
3596 init_test(cx);
3597
3598 let fs = FakeFs::new(cx.executor());
3599 fs.insert_tree(
3600 path!("/dir"),
3601 json!({
3602 "file1": "the old contents",
3603 }),
3604 )
3605 .await;
3606
3607 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3608 let buffer = project
3609 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3610 .await
3611 .unwrap();
3612 buffer.update(cx, |buffer, cx| {
3613 assert_eq!(buffer.text(), "the old contents");
3614 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3615 });
3616
3617 project
3618 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3619 .await
3620 .unwrap();
3621
3622 let new_text = fs
3623 .load(Path::new(path!("/dir/file1")))
3624 .await
3625 .unwrap()
3626 .replace("\r\n", "\n");
3627 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3628}
3629
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    // Saving an untitled buffer under a `.rs` path should spawn the Rust
    // language server and register the buffer with it, even though no server
    // was running when the buffer was first created.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer has no path, so no language server applies yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: Arc::from("file.rs".as_ref()),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the freshly-started server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
3709
3710#[gpui::test(iterations = 30)]
3711async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3712 init_test(cx);
3713
3714 let fs = FakeFs::new(cx.executor().clone());
3715 fs.insert_tree(
3716 path!("/dir"),
3717 json!({
3718 "file1": "the original contents",
3719 }),
3720 )
3721 .await;
3722
3723 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3724 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3725 let buffer = project
3726 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3727 .await
3728 .unwrap();
3729
3730 // Simulate buffer diffs being slow, so that they don't complete before
3731 // the next file change occurs.
3732 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3733
3734 // Change the buffer's file on disk, and then wait for the file change
3735 // to be detected by the worktree, so that the buffer starts reloading.
3736 fs.save(
3737 path!("/dir/file1").as_ref(),
3738 &"the first contents".into(),
3739 Default::default(),
3740 )
3741 .await
3742 .unwrap();
3743 worktree.next_event(cx).await;
3744
3745 // Change the buffer's file again. Depending on the random seed, the
3746 // previous file change may still be in progress.
3747 fs.save(
3748 path!("/dir/file1").as_ref(),
3749 &"the second contents".into(),
3750 Default::default(),
3751 )
3752 .await
3753 .unwrap();
3754 worktree.next_event(cx).await;
3755
3756 cx.executor().run_until_parked();
3757 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3758 buffer.read_with(cx, |buffer, _| {
3759 assert_eq!(buffer.text(), on_disk_text);
3760 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3761 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3762 });
3763}
3764
3765#[gpui::test(iterations = 30)]
3766async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3767 init_test(cx);
3768
3769 let fs = FakeFs::new(cx.executor().clone());
3770 fs.insert_tree(
3771 path!("/dir"),
3772 json!({
3773 "file1": "the original contents",
3774 }),
3775 )
3776 .await;
3777
3778 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3779 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3780 let buffer = project
3781 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3782 .await
3783 .unwrap();
3784
3785 // Simulate buffer diffs being slow, so that they don't complete before
3786 // the next file change occurs.
3787 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3788
3789 // Change the buffer's file on disk, and then wait for the file change
3790 // to be detected by the worktree, so that the buffer starts reloading.
3791 fs.save(
3792 path!("/dir/file1").as_ref(),
3793 &"the first contents".into(),
3794 Default::default(),
3795 )
3796 .await
3797 .unwrap();
3798 worktree.next_event(cx).await;
3799
3800 cx.executor()
3801 .spawn(cx.executor().simulate_random_delay())
3802 .await;
3803
3804 // Perform a noop edit, causing the buffer's version to increase.
3805 buffer.update(cx, |buffer, cx| {
3806 buffer.edit([(0..0, " ")], None, cx);
3807 buffer.undo(cx);
3808 });
3809
3810 cx.executor().run_until_parked();
3811 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3812 buffer.read_with(cx, |buffer, _| {
3813 let buffer_text = buffer.text();
3814 if buffer_text == on_disk_text {
3815 assert!(
3816 !buffer.is_dirty() && !buffer.has_conflict(),
3817 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3818 );
3819 }
3820 // If the file change occurred while the buffer was processing the first
3821 // change, the buffer will be in a conflicting state.
3822 else {
3823 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3824 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3825 }
3826 });
3827}
3828
3829#[gpui::test]
3830async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3831 init_test(cx);
3832
3833 let fs = FakeFs::new(cx.executor());
3834 fs.insert_tree(
3835 path!("/dir"),
3836 json!({
3837 "file1": "the old contents",
3838 }),
3839 )
3840 .await;
3841
3842 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3843 let buffer = project
3844 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3845 .await
3846 .unwrap();
3847 buffer.update(cx, |buffer, cx| {
3848 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3849 });
3850
3851 project
3852 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3853 .await
3854 .unwrap();
3855
3856 let new_text = fs
3857 .load(Path::new(path!("/dir/file1")))
3858 .await
3859 .unwrap()
3860 .replace("\r\n", "\n");
3861 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3862}
3863
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    // Saving an untitled buffer under a new path writes the file, clears the
    // dirty state, and re-detects the language from the new extension.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // A brand-new buffer has no file, so it starts out as Plain Text.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The `.rs` extension now maps the buffer to the Rust language.
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the just-saved path must return the same buffer, not a copy.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3915
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Uses a real filesystem (TempTree + RealFs) to verify that renames and
    // deletions on disk preserve worktree entry ids, retarget open buffers,
    // and replicate correctly to a remote copy of the worktree.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a path (panics if absent).
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree broadcasts, so they can be
    // replayed into the remote copy later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including renames of ancestor directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers now point at the renamed paths; the deleted file's
        // buffer keeps its old path but reports a Deleted disk state.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });
}
4081
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    // Renaming a directory through the project keeps both the directory's and
    // its children's entry ids stable, and leaves open buffers clean.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a path (panics if absent).
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the parent directory "a" -> "b" via the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids are preserved, and the open buffer remains clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
4133
4134#[gpui::test]
4135async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4136 init_test(cx);
4137
4138 let fs = FakeFs::new(cx.executor());
4139 fs.insert_tree(
4140 "/dir",
4141 json!({
4142 "a.txt": "a-contents",
4143 "b.txt": "b-contents",
4144 }),
4145 )
4146 .await;
4147
4148 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4149
4150 // Spawn multiple tasks to open paths, repeating some paths.
4151 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4152 (
4153 p.open_local_buffer("/dir/a.txt", cx),
4154 p.open_local_buffer("/dir/b.txt", cx),
4155 p.open_local_buffer("/dir/a.txt", cx),
4156 )
4157 });
4158
4159 let buffer_a_1 = buffer_a_1.await.unwrap();
4160 let buffer_a_2 = buffer_a_2.await.unwrap();
4161 let buffer_b = buffer_b.await.unwrap();
4162 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4163 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4164
4165 // There is only one buffer per path.
4166 let buffer_a_id = buffer_a_1.entity_id();
4167 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4168
4169 // Open the same path again while it is still open.
4170 drop(buffer_a_1);
4171 let buffer_a_3 = project
4172 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4173 .await
4174 .unwrap();
4175
4176 // There's still only one buffer per path.
4177 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4178}
4179
4180#[gpui::test]
4181async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4182 init_test(cx);
4183
4184 let fs = FakeFs::new(cx.executor());
4185 fs.insert_tree(
4186 path!("/dir"),
4187 json!({
4188 "file1": "abc",
4189 "file2": "def",
4190 "file3": "ghi",
4191 }),
4192 )
4193 .await;
4194
4195 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4196
4197 let buffer1 = project
4198 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4199 .await
4200 .unwrap();
4201 let events = Arc::new(Mutex::new(Vec::new()));
4202
4203 // initially, the buffer isn't dirty.
4204 buffer1.update(cx, |buffer, cx| {
4205 cx.subscribe(&buffer1, {
4206 let events = events.clone();
4207 move |_, _, event, _| match event {
4208 BufferEvent::Operation { .. } => {}
4209 _ => events.lock().push(event.clone()),
4210 }
4211 })
4212 .detach();
4213
4214 assert!(!buffer.is_dirty());
4215 assert!(events.lock().is_empty());
4216
4217 buffer.edit([(1..2, "")], None, cx);
4218 });
4219
4220 // after the first edit, the buffer is dirty, and emits a dirtied event.
4221 buffer1.update(cx, |buffer, cx| {
4222 assert!(buffer.text() == "ac");
4223 assert!(buffer.is_dirty());
4224 assert_eq!(
4225 *events.lock(),
4226 &[
4227 language::BufferEvent::Edited,
4228 language::BufferEvent::DirtyChanged
4229 ]
4230 );
4231 events.lock().clear();
4232 buffer.did_save(
4233 buffer.version(),
4234 buffer.file().unwrap().disk_state().mtime(),
4235 cx,
4236 );
4237 });
4238
4239 // after saving, the buffer is not dirty, and emits a saved event.
4240 buffer1.update(cx, |buffer, cx| {
4241 assert!(!buffer.is_dirty());
4242 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4243 events.lock().clear();
4244
4245 buffer.edit([(1..1, "B")], None, cx);
4246 buffer.edit([(2..2, "D")], None, cx);
4247 });
4248
4249 // after editing again, the buffer is dirty, and emits another dirty event.
4250 buffer1.update(cx, |buffer, cx| {
4251 assert!(buffer.text() == "aBDc");
4252 assert!(buffer.is_dirty());
4253 assert_eq!(
4254 *events.lock(),
4255 &[
4256 language::BufferEvent::Edited,
4257 language::BufferEvent::DirtyChanged,
4258 language::BufferEvent::Edited,
4259 ],
4260 );
4261 events.lock().clear();
4262
4263 // After restoring the buffer to its previously-saved state,
4264 // the buffer is not considered dirty anymore.
4265 buffer.edit([(1..3, "")], None, cx);
4266 assert!(buffer.text() == "ac");
4267 assert!(!buffer.is_dirty());
4268 });
4269
4270 assert_eq!(
4271 *events.lock(),
4272 &[
4273 language::BufferEvent::Edited,
4274 language::BufferEvent::DirtyChanged
4275 ]
4276 );
4277
4278 // When a file is deleted, it is not considered dirty.
4279 let events = Arc::new(Mutex::new(Vec::new()));
4280 let buffer2 = project
4281 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4282 .await
4283 .unwrap();
4284 buffer2.update(cx, |_, cx| {
4285 cx.subscribe(&buffer2, {
4286 let events = events.clone();
4287 move |_, _, event, _| match event {
4288 BufferEvent::Operation { .. } => {}
4289 _ => events.lock().push(event.clone()),
4290 }
4291 })
4292 .detach();
4293 });
4294
4295 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4296 .await
4297 .unwrap();
4298 cx.executor().run_until_parked();
4299 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4300 assert_eq!(
4301 mem::take(&mut *events.lock()),
4302 &[language::BufferEvent::FileHandleChanged]
4303 );
4304
4305 // Buffer becomes dirty when edited.
4306 buffer2.update(cx, |buffer, cx| {
4307 buffer.edit([(2..3, "")], None, cx);
4308 assert_eq!(buffer.is_dirty(), true);
4309 });
4310 assert_eq!(
4311 mem::take(&mut *events.lock()),
4312 &[
4313 language::BufferEvent::Edited,
4314 language::BufferEvent::DirtyChanged
4315 ]
4316 );
4317
4318 // Buffer becomes clean again when all of its content is removed, because
4319 // the file was deleted.
4320 buffer2.update(cx, |buffer, cx| {
4321 buffer.edit([(0..2, "")], None, cx);
4322 assert_eq!(buffer.is_empty(), true);
4323 assert_eq!(buffer.is_dirty(), false);
4324 });
4325 assert_eq!(
4326 *events.lock(),
4327 &[
4328 language::BufferEvent::Edited,
4329 language::BufferEvent::DirtyChanged
4330 ]
4331 );
4332
4333 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4334 let events = Arc::new(Mutex::new(Vec::new()));
4335 let buffer3 = project
4336 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4337 .await
4338 .unwrap();
4339 buffer3.update(cx, |_, cx| {
4340 cx.subscribe(&buffer3, {
4341 let events = events.clone();
4342 move |_, _, event, _| match event {
4343 BufferEvent::Operation { .. } => {}
4344 _ => events.lock().push(event.clone()),
4345 }
4346 })
4347 .detach();
4348 });
4349
4350 buffer3.update(cx, |buffer, cx| {
4351 buffer.edit([(0..0, "x")], None, cx);
4352 });
4353 events.lock().clear();
4354 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4355 .await
4356 .unwrap();
4357 cx.executor().run_until_parked();
4358 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4359 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4360}
4361
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // A clean buffer reloads when its file changes on disk — preserving
    // anchors through a diff-based edit — while a dirty buffer keeps its
    // contents and is flagged as conflicted instead.
    init_test(cx);

    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Place anchors at each marked offset so we can verify they survive the
    // reload.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Anchors moved with the diff, landing at the corresponding marked
        // offsets in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4444
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    // Buffers normalize text to `\n` internally but remember each file's
    // original line ending, track changes to it on disk, and restore it when
    // writing the file back out.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();

    // In-memory text is always `\n`-separated; the detected line ending is
    // stored alongside.
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        path!("/dir/file1").as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
4506
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics connected via `related_information`
    // are grouped: hint entries share their primary diagnostic's `group_id`,
    // `diagnostics_in_range` yields all entries in position order, and
    // `diagnostic_group` returns exactly one group's entries.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Simulate a server publishing five diagnostics for one file:
    // - "error 1" (warning) with one related hint location, plus that hint
    //   also published as its own HINT diagnostic pointing back at it.
    // - "error 2" (error) with two related hint locations, plus both hints
    //   also published as their own HINT diagnostics pointing back at it.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary "error 1" with its related hint.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Standalone HINT mirroring "error 1 hint 1", linking back to the primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary "error 2" with two related hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // Standalone HINT mirroring "error 2 hint 1", linking back to the primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Standalone HINT mirroring "error 2 hint 2", linking back to the primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Ingest the push-style diagnostics into the LSP store.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries in position order. The "error 2" cluster got group_id 0,
    // the "error 1" cluster group_id 1; within a group, only the original
    // diagnostic is marked `is_primary`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: primary "error 2" plus its two hints, in position order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: primary "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
4766
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a worktree entry sends the LSP
    // `workspace/willRenameFiles` request (and applies the workspace edit the
    // server returns) followed by the `workspace/didRenameFiles` notification,
    // for a server that registered matching file-operation filters.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters: all `.rs` files plus all folders, so the
    // one.rs -> three.rs rename below must be reported to the server.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    // Fake server advertising both willRename and didRename capabilities
    // with the filters above.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening the buffer starts the language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the resulting future resolves only after the
    // willRenameFiles round-trip below completes.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // Workspace edit the fake server will return from willRenameFiles.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records that the willRenameFiles handler actually ran (set exactly once).
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe the old and new URIs of the
                    // single renamed file.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles
    // with the same file pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    // The workspace edit returned from willRenameFiles was received and applied.
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4895
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Verifies symbol rename: `prepare_rename` surfaces the range the server
    // says is renameable, and `perform_rename` applies a workspace edit that
    // spans multiple files, returning one transaction entry per buffer.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server advertises rename with prepare support so both halves of
    // the flow are exercised.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the fake server reports
    // columns 6..9 as the renameable range.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server returns edits in both one.rs
    // (the definition) and two.rs (the two usages).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction covers both edited buffers; check each one's new text.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5035
5036#[gpui::test]
5037async fn test_search(cx: &mut gpui::TestAppContext) {
5038 init_test(cx);
5039
5040 let fs = FakeFs::new(cx.executor());
5041 fs.insert_tree(
5042 path!("/dir"),
5043 json!({
5044 "one.rs": "const ONE: usize = 1;",
5045 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5046 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5047 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5048 }),
5049 )
5050 .await;
5051 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5052 assert_eq!(
5053 search(
5054 &project,
5055 SearchQuery::text(
5056 "TWO",
5057 false,
5058 true,
5059 false,
5060 Default::default(),
5061 Default::default(),
5062 false,
5063 None
5064 )
5065 .unwrap(),
5066 cx
5067 )
5068 .await
5069 .unwrap(),
5070 HashMap::from_iter([
5071 (path!("dir/two.rs").to_string(), vec![6..9]),
5072 (path!("dir/three.rs").to_string(), vec![37..40])
5073 ])
5074 );
5075
5076 let buffer_4 = project
5077 .update(cx, |project, cx| {
5078 project.open_local_buffer(path!("/dir/four.rs"), cx)
5079 })
5080 .await
5081 .unwrap();
5082 buffer_4.update(cx, |buffer, cx| {
5083 let text = "two::TWO";
5084 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5085 });
5086
5087 assert_eq!(
5088 search(
5089 &project,
5090 SearchQuery::text(
5091 "TWO",
5092 false,
5093 true,
5094 false,
5095 Default::default(),
5096 Default::default(),
5097 false,
5098 None,
5099 )
5100 .unwrap(),
5101 cx
5102 )
5103 .await
5104 .unwrap(),
5105 HashMap::from_iter([
5106 (path!("dir/two.rs").to_string(), vec![6..9]),
5107 (path!("dir/three.rs").to_string(), vec![37..40]),
5108 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5109 ])
5110 );
5111}
5112
5113#[gpui::test]
5114async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
5115 init_test(cx);
5116
5117 let search_query = "file";
5118
5119 let fs = FakeFs::new(cx.executor());
5120 fs.insert_tree(
5121 path!("/dir"),
5122 json!({
5123 "one.rs": r#"// Rust file one"#,
5124 "one.ts": r#"// TypeScript file one"#,
5125 "two.rs": r#"// Rust file two"#,
5126 "two.ts": r#"// TypeScript file two"#,
5127 }),
5128 )
5129 .await;
5130 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5131
5132 assert!(
5133 search(
5134 &project,
5135 SearchQuery::text(
5136 search_query,
5137 false,
5138 true,
5139 false,
5140 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5141 Default::default(),
5142 false,
5143 None
5144 )
5145 .unwrap(),
5146 cx
5147 )
5148 .await
5149 .unwrap()
5150 .is_empty(),
5151 "If no inclusions match, no files should be returned"
5152 );
5153
5154 assert_eq!(
5155 search(
5156 &project,
5157 SearchQuery::text(
5158 search_query,
5159 false,
5160 true,
5161 false,
5162 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5163 Default::default(),
5164 false,
5165 None
5166 )
5167 .unwrap(),
5168 cx
5169 )
5170 .await
5171 .unwrap(),
5172 HashMap::from_iter([
5173 (path!("dir/one.rs").to_string(), vec![8..12]),
5174 (path!("dir/two.rs").to_string(), vec![8..12]),
5175 ]),
5176 "Rust only search should give only Rust files"
5177 );
5178
5179 assert_eq!(
5180 search(
5181 &project,
5182 SearchQuery::text(
5183 search_query,
5184 false,
5185 true,
5186 false,
5187 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5188 Default::default(),
5189 false,
5190 None,
5191 )
5192 .unwrap(),
5193 cx
5194 )
5195 .await
5196 .unwrap(),
5197 HashMap::from_iter([
5198 (path!("dir/one.ts").to_string(), vec![14..18]),
5199 (path!("dir/two.ts").to_string(), vec![14..18]),
5200 ]),
5201 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
5202 );
5203
5204 assert_eq!(
5205 search(
5206 &project,
5207 SearchQuery::text(
5208 search_query,
5209 false,
5210 true,
5211 false,
5212 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5213 .unwrap(),
5214 Default::default(),
5215 false,
5216 None,
5217 )
5218 .unwrap(),
5219 cx
5220 )
5221 .await
5222 .unwrap(),
5223 HashMap::from_iter([
5224 (path!("dir/two.ts").to_string(), vec![14..18]),
5225 (path!("dir/one.rs").to_string(), vec![8..12]),
5226 (path!("dir/one.ts").to_string(), vec![14..18]),
5227 (path!("dir/two.rs").to_string(), vec![8..12]),
5228 ]),
5229 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
5230 );
5231}
5232
5233#[gpui::test]
5234async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5235 init_test(cx);
5236
5237 let search_query = "file";
5238
5239 let fs = FakeFs::new(cx.executor());
5240 fs.insert_tree(
5241 path!("/dir"),
5242 json!({
5243 "one.rs": r#"// Rust file one"#,
5244 "one.ts": r#"// TypeScript file one"#,
5245 "two.rs": r#"// Rust file two"#,
5246 "two.ts": r#"// TypeScript file two"#,
5247 }),
5248 )
5249 .await;
5250 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5251
5252 assert_eq!(
5253 search(
5254 &project,
5255 SearchQuery::text(
5256 search_query,
5257 false,
5258 true,
5259 false,
5260 Default::default(),
5261 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5262 false,
5263 None,
5264 )
5265 .unwrap(),
5266 cx
5267 )
5268 .await
5269 .unwrap(),
5270 HashMap::from_iter([
5271 (path!("dir/one.rs").to_string(), vec![8..12]),
5272 (path!("dir/one.ts").to_string(), vec![14..18]),
5273 (path!("dir/two.rs").to_string(), vec![8..12]),
5274 (path!("dir/two.ts").to_string(), vec![14..18]),
5275 ]),
5276 "If no exclusions match, all files should be returned"
5277 );
5278
5279 assert_eq!(
5280 search(
5281 &project,
5282 SearchQuery::text(
5283 search_query,
5284 false,
5285 true,
5286 false,
5287 Default::default(),
5288 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5289 false,
5290 None,
5291 )
5292 .unwrap(),
5293 cx
5294 )
5295 .await
5296 .unwrap(),
5297 HashMap::from_iter([
5298 (path!("dir/one.ts").to_string(), vec![14..18]),
5299 (path!("dir/two.ts").to_string(), vec![14..18]),
5300 ]),
5301 "Rust exclusion search should give only TypeScript files"
5302 );
5303
5304 assert_eq!(
5305 search(
5306 &project,
5307 SearchQuery::text(
5308 search_query,
5309 false,
5310 true,
5311 false,
5312 Default::default(),
5313 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5314 false,
5315 None,
5316 )
5317 .unwrap(),
5318 cx
5319 )
5320 .await
5321 .unwrap(),
5322 HashMap::from_iter([
5323 (path!("dir/one.rs").to_string(), vec![8..12]),
5324 (path!("dir/two.rs").to_string(), vec![8..12]),
5325 ]),
5326 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5327 );
5328
5329 assert!(
5330 search(
5331 &project,
5332 SearchQuery::text(
5333 search_query,
5334 false,
5335 true,
5336 false,
5337 Default::default(),
5338 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5339 .unwrap(),
5340 false,
5341 None,
5342 )
5343 .unwrap(),
5344 cx
5345 )
5346 .await
5347 .unwrap()
5348 .is_empty(),
5349 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5350 );
5351}
5352
5353#[gpui::test]
5354async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5355 init_test(cx);
5356
5357 let search_query = "file";
5358
5359 let fs = FakeFs::new(cx.executor());
5360 fs.insert_tree(
5361 path!("/dir"),
5362 json!({
5363 "one.rs": r#"// Rust file one"#,
5364 "one.ts": r#"// TypeScript file one"#,
5365 "two.rs": r#"// Rust file two"#,
5366 "two.ts": r#"// TypeScript file two"#,
5367 }),
5368 )
5369 .await;
5370
5371 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5372 let _buffer = project.update(cx, |project, cx| {
5373 let buffer = project.create_local_buffer("file", None, cx);
5374 project.mark_buffer_as_non_searchable(buffer.read(cx).remote_id(), cx);
5375 buffer
5376 });
5377
5378 assert_eq!(
5379 search(
5380 &project,
5381 SearchQuery::text(
5382 search_query,
5383 false,
5384 true,
5385 false,
5386 Default::default(),
5387 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5388 false,
5389 None,
5390 )
5391 .unwrap(),
5392 cx
5393 )
5394 .await
5395 .unwrap(),
5396 HashMap::from_iter([
5397 (path!("dir/one.rs").to_string(), vec![8..12]),
5398 (path!("dir/one.ts").to_string(), vec![14..18]),
5399 (path!("dir/two.rs").to_string(), vec![8..12]),
5400 (path!("dir/two.ts").to_string(), vec![14..18]),
5401 ]),
5402 "If no exclusions match, all files should be returned"
5403 );
5404
5405 assert_eq!(
5406 search(
5407 &project,
5408 SearchQuery::text(
5409 search_query,
5410 false,
5411 true,
5412 false,
5413 Default::default(),
5414 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5415 false,
5416 None,
5417 )
5418 .unwrap(),
5419 cx
5420 )
5421 .await
5422 .unwrap(),
5423 HashMap::from_iter([
5424 (path!("dir/one.ts").to_string(), vec![14..18]),
5425 (path!("dir/two.ts").to_string(), vec![14..18]),
5426 ]),
5427 "Rust exclusion search should give only TypeScript files"
5428 );
5429
5430 assert_eq!(
5431 search(
5432 &project,
5433 SearchQuery::text(
5434 search_query,
5435 false,
5436 true,
5437 false,
5438 Default::default(),
5439 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5440 false,
5441 None,
5442 )
5443 .unwrap(),
5444 cx
5445 )
5446 .await
5447 .unwrap(),
5448 HashMap::from_iter([
5449 (path!("dir/one.rs").to_string(), vec![8..12]),
5450 (path!("dir/two.rs").to_string(), vec![8..12]),
5451 ]),
5452 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5453 );
5454
5455 assert!(
5456 search(
5457 &project,
5458 SearchQuery::text(
5459 search_query,
5460 false,
5461 true,
5462 false,
5463 Default::default(),
5464 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5465 .unwrap(),
5466 false,
5467 None,
5468 )
5469 .unwrap(),
5470 cx
5471 )
5472 .await
5473 .unwrap()
5474 .is_empty(),
5475 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5476 );
5477}
5478
5479#[gpui::test]
5480async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5481 init_test(cx);
5482
5483 let search_query = "file";
5484
5485 let fs = FakeFs::new(cx.executor());
5486 fs.insert_tree(
5487 path!("/dir"),
5488 json!({
5489 "one.rs": r#"// Rust file one"#,
5490 "one.ts": r#"// TypeScript file one"#,
5491 "two.rs": r#"// Rust file two"#,
5492 "two.ts": r#"// TypeScript file two"#,
5493 }),
5494 )
5495 .await;
5496 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5497
5498 assert!(
5499 search(
5500 &project,
5501 SearchQuery::text(
5502 search_query,
5503 false,
5504 true,
5505 false,
5506 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5507 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5508 false,
5509 None,
5510 )
5511 .unwrap(),
5512 cx
5513 )
5514 .await
5515 .unwrap()
5516 .is_empty(),
5517 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5518 );
5519
5520 assert!(
5521 search(
5522 &project,
5523 SearchQuery::text(
5524 search_query,
5525 false,
5526 true,
5527 false,
5528 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5529 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5530 false,
5531 None,
5532 )
5533 .unwrap(),
5534 cx
5535 )
5536 .await
5537 .unwrap()
5538 .is_empty(),
5539 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5540 );
5541
5542 assert!(
5543 search(
5544 &project,
5545 SearchQuery::text(
5546 search_query,
5547 false,
5548 true,
5549 false,
5550 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5551 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5552 false,
5553 None,
5554 )
5555 .unwrap(),
5556 cx
5557 )
5558 .await
5559 .unwrap()
5560 .is_empty(),
5561 "Non-matching inclusions and exclusions should not change that."
5562 );
5563
5564 assert_eq!(
5565 search(
5566 &project,
5567 SearchQuery::text(
5568 search_query,
5569 false,
5570 true,
5571 false,
5572 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5573 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5574 false,
5575 None,
5576 )
5577 .unwrap(),
5578 cx
5579 )
5580 .await
5581 .unwrap(),
5582 HashMap::from_iter([
5583 (path!("dir/one.ts").to_string(), vec![14..18]),
5584 (path!("dir/two.ts").to_string(), vec![14..18]),
5585 ]),
5586 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5587 );
5588}
5589
5590#[gpui::test]
5591async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
5592 init_test(cx);
5593
5594 let fs = FakeFs::new(cx.executor());
5595 fs.insert_tree(
5596 path!("/worktree-a"),
5597 json!({
5598 "haystack.rs": r#"// NEEDLE"#,
5599 "haystack.ts": r#"// NEEDLE"#,
5600 }),
5601 )
5602 .await;
5603 fs.insert_tree(
5604 path!("/worktree-b"),
5605 json!({
5606 "haystack.rs": r#"// NEEDLE"#,
5607 "haystack.ts": r#"// NEEDLE"#,
5608 }),
5609 )
5610 .await;
5611
5612 let project = Project::test(
5613 fs.clone(),
5614 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
5615 cx,
5616 )
5617 .await;
5618
5619 assert_eq!(
5620 search(
5621 &project,
5622 SearchQuery::text(
5623 "NEEDLE",
5624 false,
5625 true,
5626 false,
5627 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
5628 Default::default(),
5629 true,
5630 None,
5631 )
5632 .unwrap(),
5633 cx
5634 )
5635 .await
5636 .unwrap(),
5637 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
5638 "should only return results from included worktree"
5639 );
5640 assert_eq!(
5641 search(
5642 &project,
5643 SearchQuery::text(
5644 "NEEDLE",
5645 false,
5646 true,
5647 false,
5648 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
5649 Default::default(),
5650 true,
5651 None,
5652 )
5653 .unwrap(),
5654 cx
5655 )
5656 .await
5657 .unwrap(),
5658 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
5659 "should only return results from included worktree"
5660 );
5661
5662 assert_eq!(
5663 search(
5664 &project,
5665 SearchQuery::text(
5666 "NEEDLE",
5667 false,
5668 true,
5669 false,
5670 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5671 Default::default(),
5672 false,
5673 None,
5674 )
5675 .unwrap(),
5676 cx
5677 )
5678 .await
5679 .unwrap(),
5680 HashMap::from_iter([
5681 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
5682 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
5683 ]),
5684 "should return results from both worktrees"
5685 );
5686}
5687
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Both `target/` (via `**/target`) and `node_modules/` are gitignored;
    // the root `package.json` is the only non-ignored file containing "key".
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // With the "include ignored" flag (the fourth boolean, contrast with the
    // `true` in the next search) left `false`, matches inside gitignored
    // directories must not be reported. Match ranges are byte offsets.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is created for each search, presumably so
    // every query starts from a clean worktree scan — TODO confirm.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Same query but with the "include ignored" flag set to `true`: every
    // file containing the needle is found, including the ones under the
    // gitignored `target/` and `node_modules/` directories.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include/exclude path filters still apply when searching ignored files:
    // only `node_modules/prettier/**` is searched, and `*.ts` files are
    // filtered out of it, leaving just prettier's `package.json`.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5810
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Case-sensitive search for the lowercase word. Expected ranges are byte
    // offsets: each Cyrillic letter is 2 bytes in UTF-8, so "привет" spans
    // 12 bytes (e.g. 17..29 in one.rs: "// " = 3, "ПРИВЕТ" = 12, "? " = 2).
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // A case-sensitive unicode query stays a plain text query.
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The same needle, case-insensitive: the query is converted into a regex
    // (asserted below), and now the uppercase "ПРИВЕТ" occurrences match too.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Even though the case-insensitive path goes through a regex, the `.` in
    // the needle matches literally: only "ПРИВЕТ." in two.rs is found, not
    // "ПРИВЕТ?" or "привет!".
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
5894
5895#[gpui::test]
5896async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5897 init_test(cx);
5898
5899 let fs = FakeFs::new(cx.executor().clone());
5900 fs.insert_tree(
5901 "/one/two",
5902 json!({
5903 "three": {
5904 "a.txt": "",
5905 "four": {}
5906 },
5907 "c.rs": ""
5908 }),
5909 )
5910 .await;
5911
5912 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5913 project
5914 .update(cx, |project, cx| {
5915 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5916 project.create_entry((id, "b.."), true, cx)
5917 })
5918 .await
5919 .unwrap()
5920 .to_included()
5921 .unwrap();
5922
5923 // Can't create paths outside the project
5924 let result = project
5925 .update(cx, |project, cx| {
5926 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5927 project.create_entry((id, "../../boop"), true, cx)
5928 })
5929 .await;
5930 assert!(result.is_err());
5931
5932 // Can't create paths with '..'
5933 let result = project
5934 .update(cx, |project, cx| {
5935 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5936 project.create_entry((id, "four/../beep"), true, cx)
5937 })
5938 .await;
5939 assert!(result.is_err());
5940
5941 assert_eq!(
5942 fs.paths(true),
5943 vec![
5944 PathBuf::from(path!("/")),
5945 PathBuf::from(path!("/one")),
5946 PathBuf::from(path!("/one/two")),
5947 PathBuf::from(path!("/one/two/c.rs")),
5948 PathBuf::from(path!("/one/two/three")),
5949 PathBuf::from(path!("/one/two/three/a.txt")),
5950 PathBuf::from(path!("/one/two/three/b..")),
5951 PathBuf::from(path!("/one/two/three/four")),
5952 ]
5953 );
5954
5955 // And we cannot open buffers with '..'
5956 let result = project
5957 .update(cx, |project, cx| {
5958 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5959 project.open_buffer((id, "../c.rs"), cx)
5960 })
5961 .await;
5962 assert!(result.is_err())
5963}
5964
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register four fake servers for tsx: the first three advertise hover
    // support, while NoHoverCapabilitiesServer does not.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers for it.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each started server:
    // - TypeScript/Tailwind reply with a "<name> hover" string,
    // - ESLint replies with `None` (request handled, contributes nothing),
    // - the server without the hover capability must never be queried.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue one hover request, then wait until every capable server has
    // actually received it before inspecting the merged results.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned `Some` contribute hover contents.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
6118
6119#[gpui::test]
6120async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
6121 init_test(cx);
6122
6123 let fs = FakeFs::new(cx.executor());
6124 fs.insert_tree(
6125 path!("/dir"),
6126 json!({
6127 "a.ts": "a",
6128 }),
6129 )
6130 .await;
6131
6132 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6133
6134 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6135 language_registry.add(typescript_lang());
6136 let mut fake_language_servers = language_registry.register_fake_lsp(
6137 "TypeScript",
6138 FakeLspAdapter {
6139 capabilities: lsp::ServerCapabilities {
6140 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6141 ..lsp::ServerCapabilities::default()
6142 },
6143 ..FakeLspAdapter::default()
6144 },
6145 );
6146
6147 let (buffer, _handle) = project
6148 .update(cx, |p, cx| {
6149 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6150 })
6151 .await
6152 .unwrap();
6153 cx.executor().run_until_parked();
6154
6155 let fake_server = fake_language_servers
6156 .next()
6157 .await
6158 .expect("failed to get the language server");
6159
6160 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6161 move |_, _| async move {
6162 Ok(Some(lsp::Hover {
6163 contents: lsp::HoverContents::Array(vec![
6164 lsp::MarkedString::String("".to_string()),
6165 lsp::MarkedString::String(" ".to_string()),
6166 lsp::MarkedString::String("\n\n\n".to_string()),
6167 ]),
6168 range: None,
6169 }))
6170 },
6171 );
6172
6173 let hover_task = project.update(cx, |project, cx| {
6174 project.hover(&buffer, Point::new(0, 0), cx)
6175 });
6176 let () = request_handled
6177 .next()
6178 .await
6179 .expect("All hover requests should have been triggered");
6180 assert_eq!(
6181 Vec::<String>::new(),
6182 hover_task
6183 .await
6184 .into_iter()
6185 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6186 .sorted()
6187 .collect::<Vec<_>>(),
6188 "Empty hover parts should be ignored"
6189 );
6190}
6191
6192#[gpui::test]
6193async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
6194 init_test(cx);
6195
6196 let fs = FakeFs::new(cx.executor());
6197 fs.insert_tree(
6198 path!("/dir"),
6199 json!({
6200 "a.ts": "a",
6201 }),
6202 )
6203 .await;
6204
6205 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6206
6207 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6208 language_registry.add(typescript_lang());
6209 let mut fake_language_servers = language_registry.register_fake_lsp(
6210 "TypeScript",
6211 FakeLspAdapter {
6212 capabilities: lsp::ServerCapabilities {
6213 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6214 ..lsp::ServerCapabilities::default()
6215 },
6216 ..FakeLspAdapter::default()
6217 },
6218 );
6219
6220 let (buffer, _handle) = project
6221 .update(cx, |p, cx| {
6222 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6223 })
6224 .await
6225 .unwrap();
6226 cx.executor().run_until_parked();
6227
6228 let fake_server = fake_language_servers
6229 .next()
6230 .await
6231 .expect("failed to get the language server");
6232
6233 let mut request_handled = fake_server
6234 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
6235 Ok(Some(vec![
6236 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6237 title: "organize imports".to_string(),
6238 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
6239 ..lsp::CodeAction::default()
6240 }),
6241 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6242 title: "fix code".to_string(),
6243 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
6244 ..lsp::CodeAction::default()
6245 }),
6246 ]))
6247 });
6248
6249 let code_actions_task = project.update(cx, |project, cx| {
6250 project.code_actions(
6251 &buffer,
6252 0..buffer.read(cx).len(),
6253 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
6254 cx,
6255 )
6256 });
6257
6258 let () = request_handled
6259 .next()
6260 .await
6261 .expect("The code action request should have been triggered");
6262
6263 let code_actions = code_actions_task.await.unwrap();
6264 assert_eq!(code_actions.len(), 1);
6265 assert_eq!(
6266 code_actions[0].lsp_action.action_kind(),
6267 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
6268 );
6269}
6270
6271#[gpui::test]
6272async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6273 init_test(cx);
6274
6275 let fs = FakeFs::new(cx.executor());
6276 fs.insert_tree(
6277 path!("/dir"),
6278 json!({
6279 "a.tsx": "a",
6280 }),
6281 )
6282 .await;
6283
6284 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6285
6286 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6287 language_registry.add(tsx_lang());
6288 let language_server_names = [
6289 "TypeScriptServer",
6290 "TailwindServer",
6291 "ESLintServer",
6292 "NoActionsCapabilitiesServer",
6293 ];
6294
6295 let mut language_server_rxs = [
6296 language_registry.register_fake_lsp(
6297 "tsx",
6298 FakeLspAdapter {
6299 name: language_server_names[0],
6300 capabilities: lsp::ServerCapabilities {
6301 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6302 ..lsp::ServerCapabilities::default()
6303 },
6304 ..FakeLspAdapter::default()
6305 },
6306 ),
6307 language_registry.register_fake_lsp(
6308 "tsx",
6309 FakeLspAdapter {
6310 name: language_server_names[1],
6311 capabilities: lsp::ServerCapabilities {
6312 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6313 ..lsp::ServerCapabilities::default()
6314 },
6315 ..FakeLspAdapter::default()
6316 },
6317 ),
6318 language_registry.register_fake_lsp(
6319 "tsx",
6320 FakeLspAdapter {
6321 name: language_server_names[2],
6322 capabilities: lsp::ServerCapabilities {
6323 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6324 ..lsp::ServerCapabilities::default()
6325 },
6326 ..FakeLspAdapter::default()
6327 },
6328 ),
6329 language_registry.register_fake_lsp(
6330 "tsx",
6331 FakeLspAdapter {
6332 name: language_server_names[3],
6333 capabilities: lsp::ServerCapabilities {
6334 code_action_provider: None,
6335 ..lsp::ServerCapabilities::default()
6336 },
6337 ..FakeLspAdapter::default()
6338 },
6339 ),
6340 ];
6341
6342 let (buffer, _handle) = project
6343 .update(cx, |p, cx| {
6344 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6345 })
6346 .await
6347 .unwrap();
6348 cx.executor().run_until_parked();
6349
6350 let mut servers_with_actions_requests = HashMap::default();
6351 for i in 0..language_server_names.len() {
6352 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6353 panic!(
6354 "Failed to get language server #{i} with name {}",
6355 &language_server_names[i]
6356 )
6357 });
6358 let new_server_name = new_server.server.name();
6359
6360 assert!(
6361 !servers_with_actions_requests.contains_key(&new_server_name),
6362 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6363 );
6364 match new_server_name.0.as_ref() {
6365 "TailwindServer" | "TypeScriptServer" => {
6366 servers_with_actions_requests.insert(
6367 new_server_name.clone(),
6368 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6369 move |_, _| {
6370 let name = new_server_name.clone();
6371 async move {
6372 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6373 lsp::CodeAction {
6374 title: format!("{name} code action"),
6375 ..lsp::CodeAction::default()
6376 },
6377 )]))
6378 }
6379 },
6380 ),
6381 );
6382 }
6383 "ESLintServer" => {
6384 servers_with_actions_requests.insert(
6385 new_server_name,
6386 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6387 |_, _| async move { Ok(None) },
6388 ),
6389 );
6390 }
6391 "NoActionsCapabilitiesServer" => {
6392 let _never_handled = new_server
6393 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6394 panic!(
6395 "Should not call for code actions server with no corresponding capabilities"
6396 )
6397 });
6398 }
6399 unexpected => panic!("Unexpected server name: {unexpected}"),
6400 }
6401 }
6402
6403 let code_actions_task = project.update(cx, |project, cx| {
6404 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6405 });
6406
6407 // cx.run_until_parked();
6408 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6409 |mut code_actions_request| async move {
6410 code_actions_request
6411 .next()
6412 .await
6413 .expect("All code actions requests should have been triggered")
6414 },
6415 ))
6416 .await;
6417 assert_eq!(
6418 vec!["TailwindServer code action", "TypeScriptServer code action"],
6419 code_actions_task
6420 .await
6421 .unwrap()
6422 .into_iter()
6423 .map(|code_action| code_action.lsp_action.title().to_owned())
6424 .sorted()
6425 .collect::<Vec<_>>(),
6426 "Should receive code actions responses from all related servers with hover capabilities"
6427 );
6428}
6429
6430#[gpui::test]
6431async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6432 init_test(cx);
6433
6434 let fs = FakeFs::new(cx.executor());
6435 fs.insert_tree(
6436 "/dir",
6437 json!({
6438 "a.rs": "let a = 1;",
6439 "b.rs": "let b = 2;",
6440 "c.rs": "let c = 2;",
6441 }),
6442 )
6443 .await;
6444
6445 let project = Project::test(
6446 fs,
6447 [
6448 "/dir/a.rs".as_ref(),
6449 "/dir/b.rs".as_ref(),
6450 "/dir/c.rs".as_ref(),
6451 ],
6452 cx,
6453 )
6454 .await;
6455
6456 // check the initial state and get the worktrees
6457 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6458 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6459 assert_eq!(worktrees.len(), 3);
6460
6461 let worktree_a = worktrees[0].read(cx);
6462 let worktree_b = worktrees[1].read(cx);
6463 let worktree_c = worktrees[2].read(cx);
6464
6465 // check they start in the right order
6466 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6467 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6468 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6469
6470 (
6471 worktrees[0].clone(),
6472 worktrees[1].clone(),
6473 worktrees[2].clone(),
6474 )
6475 });
6476
6477 // move first worktree to after the second
6478 // [a, b, c] -> [b, a, c]
6479 project
6480 .update(cx, |project, cx| {
6481 let first = worktree_a.read(cx);
6482 let second = worktree_b.read(cx);
6483 project.move_worktree(first.id(), second.id(), cx)
6484 })
6485 .expect("moving first after second");
6486
6487 // check the state after moving
6488 project.update(cx, |project, cx| {
6489 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6490 assert_eq!(worktrees.len(), 3);
6491
6492 let first = worktrees[0].read(cx);
6493 let second = worktrees[1].read(cx);
6494 let third = worktrees[2].read(cx);
6495
6496 // check they are now in the right order
6497 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6498 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6499 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6500 });
6501
6502 // move the second worktree to before the first
6503 // [b, a, c] -> [a, b, c]
6504 project
6505 .update(cx, |project, cx| {
6506 let second = worktree_a.read(cx);
6507 let first = worktree_b.read(cx);
6508 project.move_worktree(first.id(), second.id(), cx)
6509 })
6510 .expect("moving second before first");
6511
6512 // check the state after moving
6513 project.update(cx, |project, cx| {
6514 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6515 assert_eq!(worktrees.len(), 3);
6516
6517 let first = worktrees[0].read(cx);
6518 let second = worktrees[1].read(cx);
6519 let third = worktrees[2].read(cx);
6520
6521 // check they are now in the right order
6522 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6523 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6524 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6525 });
6526
6527 // move the second worktree to after the third
6528 // [a, b, c] -> [a, c, b]
6529 project
6530 .update(cx, |project, cx| {
6531 let second = worktree_b.read(cx);
6532 let third = worktree_c.read(cx);
6533 project.move_worktree(second.id(), third.id(), cx)
6534 })
6535 .expect("moving second after third");
6536
6537 // check the state after moving
6538 project.update(cx, |project, cx| {
6539 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6540 assert_eq!(worktrees.len(), 3);
6541
6542 let first = worktrees[0].read(cx);
6543 let second = worktrees[1].read(cx);
6544 let third = worktrees[2].read(cx);
6545
6546 // check they are now in the right order
6547 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6548 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6549 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6550 });
6551
6552 // move the third worktree to before the second
6553 // [a, c, b] -> [a, b, c]
6554 project
6555 .update(cx, |project, cx| {
6556 let third = worktree_c.read(cx);
6557 let second = worktree_b.read(cx);
6558 project.move_worktree(third.id(), second.id(), cx)
6559 })
6560 .expect("moving third before second");
6561
6562 // check the state after moving
6563 project.update(cx, |project, cx| {
6564 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6565 assert_eq!(worktrees.len(), 3);
6566
6567 let first = worktrees[0].read(cx);
6568 let second = worktrees[1].read(cx);
6569 let third = worktrees[2].read(cx);
6570
6571 // check they are now in the right order
6572 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6573 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6574 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6575 });
6576
6577 // move the first worktree to after the third
6578 // [a, b, c] -> [b, c, a]
6579 project
6580 .update(cx, |project, cx| {
6581 let first = worktree_a.read(cx);
6582 let third = worktree_c.read(cx);
6583 project.move_worktree(first.id(), third.id(), cx)
6584 })
6585 .expect("moving first after third");
6586
6587 // check the state after moving
6588 project.update(cx, |project, cx| {
6589 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6590 assert_eq!(worktrees.len(), 3);
6591
6592 let first = worktrees[0].read(cx);
6593 let second = worktrees[1].read(cx);
6594 let third = worktrees[2].read(cx);
6595
6596 // check they are now in the right order
6597 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6598 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6599 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6600 });
6601
6602 // move the third worktree to before the first
6603 // [b, c, a] -> [a, b, c]
6604 project
6605 .update(cx, |project, cx| {
6606 let third = worktree_a.read(cx);
6607 let first = worktree_b.read(cx);
6608 project.move_worktree(third.id(), first.id(), cx)
6609 })
6610 .expect("moving third before first");
6611
6612 // check the state after moving
6613 project.update(cx, |project, cx| {
6614 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6615 assert_eq!(worktrees.len(), 3);
6616
6617 let first = worktrees[0].read(cx);
6618 let second = worktrees[1].read(cx);
6619 let third = worktrees[2].read(cx);
6620
6621 // check they are now in the right order
6622 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6623 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6624 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6625 });
6626}
6627
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index (staged) version of the file, which the unstaged diff is
    // computed against.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // On-disk (and buffer) version: one added comment line and one changed
    // line relative to the index.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The diff against the index contains one added and one modified hunk
    // (hunk ranges are buffer row ranges).
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Update the index so it now also contains the comment line but lacks
    // the println!. The unstaged diff should be recomputed against it.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    // After the index change, only the println! line is unstaged.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6725
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: HEAD, index, and working copy.
    // HEAD -> index stages the "goodbye" change; the working copy adds a
    // comment line on top of that.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index but not on disk, so it
    // shows up as a deleted file later in the test.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (the HEAD version) picks up the Rust language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // Against HEAD: the added comment is not in the index either, so it has
    // a secondary (unstaged) hunk; the println! change is staged, so its
    // secondary status is "none".
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as one deletion hunk; the deletion is not yet
    // staged (the file is still present in the index).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the file is gone from the index too, the deletion hunk's
    // secondary status flips to NoSecondaryHunk (fully staged).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6905
// End-to-end test of staging individual diff hunks: hunks transition through an
// optimistic "pending" secondary status while the git index write is in flight,
// the diff emits events for each transition, and a failed index write rolls the
// hunk back to its unstaged state.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and the index both hold six lines; the working copy deletes "zero"
    // and rewrites "two" and "four", producing three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Collect the diff's event stream so each staging operation's events can be
    // asserted in order below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The index write hasn't completed yet, so the staged hunk is in the
        // pending `SecondaryHunkRemovalPending` state.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // Optimistic state is shown even though the write will fail.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7245
// Verifies that staging multiple hunks remains consistent when FS change events
// arrive late: events are paused, hunks are staged while earlier events are
// still buffered, and the final state must show all hunks staged.
// The fixed seeds reproduce scheduling orders that previously exposed races.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as `test_staging_hunks`: three hunks (a deletion and two
    // modifications) between the committed text and the working copy.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // With events paused, the hunk stays in the pending state.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks are now pending; the third is untouched.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7439
// Randomized test: repeatedly stage/unstage random hunks with random delays
// (and optionally a deprioritized diff-recalculation task, to induce races
// between diff recalculation and index writes), then check that every hunk's
// final secondary status matches the last operation applied to it.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.gen_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines, with every 5th line modified in the buffer -> 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    // Apply random stage/unstage operations, tracking the expected pending
    // status on our local copy of the hunks.
    for _i in 0..operations {
        let hunk_ix = rng.gen_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times to shuffle task interleavings.
        for _ in 0..rng.gen_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending state should have resolved to its
    // corresponding final state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7558
7559#[gpui::test]
7560async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7561 init_test(cx);
7562
7563 let committed_contents = r#"
7564 fn main() {
7565 println!("hello from HEAD");
7566 }
7567 "#
7568 .unindent();
7569 let file_contents = r#"
7570 fn main() {
7571 println!("hello from the working copy");
7572 }
7573 "#
7574 .unindent();
7575
7576 let fs = FakeFs::new(cx.background_executor.clone());
7577 fs.insert_tree(
7578 "/dir",
7579 json!({
7580 ".git": {},
7581 "src": {
7582 "main.rs": file_contents,
7583 }
7584 }),
7585 )
7586 .await;
7587
7588 fs.set_head_for_repo(
7589 Path::new("/dir/.git"),
7590 &[("src/main.rs".into(), committed_contents.clone())],
7591 "deadbeef",
7592 );
7593 fs.set_index_for_repo(
7594 Path::new("/dir/.git"),
7595 &[("src/main.rs".into(), committed_contents.clone())],
7596 );
7597
7598 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7599
7600 let buffer = project
7601 .update(cx, |project, cx| {
7602 project.open_local_buffer("/dir/src/main.rs", cx)
7603 })
7604 .await
7605 .unwrap();
7606 let uncommitted_diff = project
7607 .update(cx, |project, cx| {
7608 project.open_uncommitted_diff(buffer.clone(), cx)
7609 })
7610 .await
7611 .unwrap();
7612
7613 cx.run_until_parked();
7614 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7615 let snapshot = buffer.read(cx).snapshot();
7616 assert_hunks(
7617 uncommitted_diff.hunks(&snapshot, cx),
7618 &snapshot,
7619 &uncommitted_diff.base_text_string().unwrap(),
7620 &[(
7621 1..2,
7622 " println!(\"hello from HEAD\");\n",
7623 " println!(\"hello from the working copy\");\n",
7624 DiffHunkStatus {
7625 kind: DiffHunkStatusKind::Modified,
7626 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7627 },
7628 )],
7629 );
7630 });
7631}
7632
7633#[gpui::test]
7634async fn test_repository_and_path_for_project_path(
7635 background_executor: BackgroundExecutor,
7636 cx: &mut gpui::TestAppContext,
7637) {
7638 init_test(cx);
7639 let fs = FakeFs::new(background_executor);
7640 fs.insert_tree(
7641 path!("/root"),
7642 json!({
7643 "c.txt": "",
7644 "dir1": {
7645 ".git": {},
7646 "deps": {
7647 "dep1": {
7648 ".git": {},
7649 "src": {
7650 "a.txt": ""
7651 }
7652 }
7653 },
7654 "src": {
7655 "b.txt": ""
7656 }
7657 },
7658 }),
7659 )
7660 .await;
7661
7662 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7663 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7664 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7665 project
7666 .update(cx, |project, cx| project.git_scans_complete(cx))
7667 .await;
7668 cx.run_until_parked();
7669
7670 project.read_with(cx, |project, cx| {
7671 let git_store = project.git_store().read(cx);
7672 let pairs = [
7673 ("c.txt", None),
7674 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
7675 (
7676 "dir1/deps/dep1/src/a.txt",
7677 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
7678 ),
7679 ];
7680 let expected = pairs
7681 .iter()
7682 .map(|(path, result)| {
7683 (
7684 path,
7685 result.map(|(repo, repo_path)| {
7686 (Path::new(repo).into(), RepoPath::from(repo_path))
7687 }),
7688 )
7689 })
7690 .collect::<Vec<_>>();
7691 let actual = pairs
7692 .iter()
7693 .map(|(path, _)| {
7694 let project_path = (tree_id, Path::new(path)).into();
7695 let result = maybe!({
7696 let (repo, repo_path) =
7697 git_store.repository_and_path_for_project_path(&project_path, cx)?;
7698 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
7699 });
7700 (path, result)
7701 })
7702 .collect::<Vec<_>>();
7703 pretty_assertions::assert_eq!(expected, actual);
7704 });
7705
7706 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
7707 .await
7708 .unwrap();
7709 cx.run_until_parked();
7710
7711 project.read_with(cx, |project, cx| {
7712 let git_store = project.git_store().read(cx);
7713 assert_eq!(
7714 git_store.repository_and_path_for_project_path(
7715 &(tree_id, Path::new("dir1/src/b.txt")).into(),
7716 cx
7717 ),
7718 None
7719 );
7720 });
7721}
7722
7723#[gpui::test]
7724async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7725 init_test(cx);
7726 let fs = FakeFs::new(cx.background_executor.clone());
7727 fs.insert_tree(
7728 path!("/root"),
7729 json!({
7730 "home": {
7731 ".git": {},
7732 "project": {
7733 "a.txt": "A"
7734 },
7735 },
7736 }),
7737 )
7738 .await;
7739 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7740
7741 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7742 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7743 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7744
7745 project
7746 .update(cx, |project, cx| project.git_scans_complete(cx))
7747 .await;
7748 tree.flush_fs_events(cx).await;
7749
7750 project.read_with(cx, |project, cx| {
7751 let containing = project
7752 .git_store()
7753 .read(cx)
7754 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7755 assert!(containing.is_none());
7756 });
7757
7758 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7759 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7760 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7761 project
7762 .update(cx, |project, cx| project.git_scans_complete(cx))
7763 .await;
7764 tree.flush_fs_events(cx).await;
7765
7766 project.read_with(cx, |project, cx| {
7767 let containing = project
7768 .git_store()
7769 .read(cx)
7770 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7771 assert_eq!(
7772 containing
7773 .unwrap()
7774 .0
7775 .read(cx)
7776 .work_directory_abs_path
7777 .as_ref(),
7778 Path::new(path!("/root/home"))
7779 );
7780 });
7781}
7782
// Exercises cached git statuses against a real git repository on disk:
// initial modified/untracked/deleted states, a subsequent working-copy edit,
// committing staged changes, and deleting tracked vs. untracked files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem and git2, so parking must be allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; it should now appear modified.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the staged modifications and the staged deletion of d.txt.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7912
7913#[gpui::test]
7914async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
7915 init_test(cx);
7916 cx.executor().allow_parking();
7917
7918 let root = TempTree::new(json!({
7919 "project": {
7920 "sub": {},
7921 "a.txt": "",
7922 },
7923 }));
7924
7925 let work_dir = root.path().join("project");
7926 let repo = git_init(work_dir.as_path());
7927 // a.txt exists in HEAD and the working copy but is deleted in the index.
7928 git_add("a.txt", &repo);
7929 git_commit("Initial commit", &repo);
7930 git_remove_index("a.txt".as_ref(), &repo);
7931 // `sub` is a nested git repository.
7932 let _sub = git_init(&work_dir.join("sub"));
7933
7934 let project = Project::test(
7935 Arc::new(RealFs::new(None, cx.executor())),
7936 [root.path()],
7937 cx,
7938 )
7939 .await;
7940
7941 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7942 tree.flush_fs_events(cx).await;
7943 project
7944 .update(cx, |project, cx| project.git_scans_complete(cx))
7945 .await;
7946 cx.executor().run_until_parked();
7947
7948 let repository = project.read_with(cx, |project, cx| {
7949 project
7950 .repositories(cx)
7951 .values()
7952 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
7953 .unwrap()
7954 .clone()
7955 });
7956
7957 repository.read_with(cx, |repository, _cx| {
7958 let entries = repository.cached_status().collect::<Vec<_>>();
7959
7960 // `sub` doesn't appear in our computed statuses.
7961 // a.txt appears with a combined `DA` status.
7962 assert_eq!(
7963 entries,
7964 [StatusEntry {
7965 repo_path: "a.txt".into(),
7966 status: TrackedStatus {
7967 index_status: StatusCode::Deleted,
7968 worktree_status: StatusCode::Added
7969 }
7970 .into(),
7971 }]
7972 )
7973 });
7974}
7975
7976#[gpui::test]
7977async fn test_repository_subfolder_git_status(
7978 executor: gpui::BackgroundExecutor,
7979 cx: &mut gpui::TestAppContext,
7980) {
7981 init_test(cx);
7982
7983 let fs = FakeFs::new(executor);
7984 fs.insert_tree(
7985 path!("/root"),
7986 json!({
7987 "my-repo": {
7988 ".git": {},
7989 "a.txt": "a",
7990 "sub-folder-1": {
7991 "sub-folder-2": {
7992 "c.txt": "cc",
7993 "d": {
7994 "e.txt": "eee"
7995 }
7996 },
7997 }
7998 },
7999 }),
8000 )
8001 .await;
8002
8003 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
8004 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
8005
8006 fs.set_status_for_repo(
8007 path!("/root/my-repo/.git").as_ref(),
8008 &[(E_TXT.as_ref(), FileStatus::Untracked)],
8009 );
8010
8011 let project = Project::test(
8012 fs.clone(),
8013 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
8014 cx,
8015 )
8016 .await;
8017
8018 project
8019 .update(cx, |project, cx| project.git_scans_complete(cx))
8020 .await;
8021 cx.run_until_parked();
8022
8023 let repository = project.read_with(cx, |project, cx| {
8024 project.repositories(cx).values().next().unwrap().clone()
8025 });
8026
8027 // Ensure that the git status is loaded correctly
8028 repository.read_with(cx, |repository, _cx| {
8029 assert_eq!(
8030 repository.work_directory_abs_path,
8031 Path::new(path!("/root/my-repo")).into()
8032 );
8033
8034 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
8035 assert_eq!(
8036 repository.status_for_path(&E_TXT.into()).unwrap().status,
8037 FileStatus::Untracked
8038 );
8039 });
8040
8041 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
8042 project
8043 .update(cx, |project, cx| project.git_scans_complete(cx))
8044 .await;
8045 cx.run_until_parked();
8046
8047 repository.read_with(cx, |repository, _cx| {
8048 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
8049 assert_eq!(repository.status_for_path(&E_TXT.into()), None);
8050 });
8051}
8052
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE: `#[cfg(any())]` never matches, so this test is currently compiled out
// entirely; it is kept for when the flakiness is resolved.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a conflicting change on another branch, then cherry-pick it onto
    // main so that a.txt conflicts.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository should now report a.txt as a merge conflict.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After the cherry-pick concludes, the conflict list should be empty.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8135
// Verifies that editing a `.gitignore` is picked up live: a file that stops
// being ignored becomes eligible for git status, and a file that starts being
// ignored loses its status.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index agree; only `.gitignore` and `a.xml` are tracked.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    // The ignore flags should flip: a.xml now ignored, b.txt staged as Added.
    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8203
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
//
// Verifies that renaming a repository's work directory on disk updates
// `work_directory_abs_path` while preserving per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit "a", then modify it in the working tree; "b" stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: work dir is project1, "a" modified, "b" untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the entire work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should follow the rename; statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8284
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of the Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
//
// End-to-end check of git file-status tracking against a real repository:
// startup state, working-copy edits, commits, reset/stash, gitignore changes,
// and directory renames should all be reflected in `status_for_path`.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout the assertions below.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        // Committed files no longer report any status.
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete a file, remove a directory, and extend the ignore rules.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A brand-new nested file should show up as untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming the ancestor directory should carry the status to the new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8486
// Verifies that adding an invisible (non-project) worktree does not cause its
// containing repository to appear in the project's repository list.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // Nested repos: /root/dir1 and /root/dir1/dep1 each have a .git.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Only dep1 is opened as a visible worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Open a single file from the outer repo as an invisible worktree.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The outer repo (/root/dir1) must still not be listed.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8548
// Verifies ignore handling during rescans: ancestor-level and repo-level
// .gitignore rules are respected, newly created files get the right
// ignored/status state, and the .git directory itself is treated as ignored.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so ignored directories are still scanned.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    // Startup state: tracked file clean; ancestor-ignored file is NOT ignored
    // from the repo's perspective; repo-ignored file is ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files in each category and stage the tracked one.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8684
// Verifies that linked git worktrees (.git file pointing at .git/worktrees/*)
// and submodules (.git file pointing at .git/modules/*) are each discovered as
// distinct repositories, and that git events in them refresh their statuses.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be discovered.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer should resolve to the linked worktree's repository.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
8834
// Verifies that two project worktrees living inside the same git repository
// are deduplicated into a single repository entry.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repo as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository, rooted at the shared parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
8881
8882async fn search(
8883 project: &Entity<Project>,
8884 query: SearchQuery,
8885 cx: &mut gpui::TestAppContext,
8886) -> Result<HashMap<String, Vec<Range<usize>>>> {
8887 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8888 let mut results = HashMap::default();
8889 while let Ok(search_result) = search_rx.recv().await {
8890 match search_result {
8891 SearchResult::Buffer { buffer, ranges } => {
8892 results.entry(buffer).or_insert(ranges);
8893 }
8894 SearchResult::LimitReached => {}
8895 }
8896 }
8897 Ok(results
8898 .into_iter()
8899 .map(|(buffer, ranges)| {
8900 buffer.update(cx, |buffer, cx| {
8901 let path = buffer
8902 .file()
8903 .unwrap()
8904 .full_path(cx)
8905 .to_string_lossy()
8906 .to_string();
8907 let ranges = ranges
8908 .into_iter()
8909 .map(|range| range.to_offset(buffer))
8910 .collect::<Vec<_>>();
8911 (path, ranges)
8912 })
8913 })
8914 .collect())
8915}
8916
8917pub fn init_test(cx: &mut gpui::TestAppContext) {
8918 zlog::init_test();
8919
8920 cx.update(|cx| {
8921 let settings_store = SettingsStore::test(cx);
8922 cx.set_global(settings_store);
8923 release_channel::init(SemanticVersion::default(), cx);
8924 language::init(cx);
8925 Project::init_settings(cx);
8926 });
8927}
8928
8929fn json_lang() -> Arc<Language> {
8930 Arc::new(Language::new(
8931 LanguageConfig {
8932 name: "JSON".into(),
8933 matcher: LanguageMatcher {
8934 path_suffixes: vec!["json".to_string()],
8935 ..Default::default()
8936 },
8937 ..Default::default()
8938 },
8939 None,
8940 ))
8941}
8942
8943fn js_lang() -> Arc<Language> {
8944 Arc::new(Language::new(
8945 LanguageConfig {
8946 name: "JavaScript".into(),
8947 matcher: LanguageMatcher {
8948 path_suffixes: vec!["js".to_string()],
8949 ..Default::default()
8950 },
8951 ..Default::default()
8952 },
8953 None,
8954 ))
8955}
8956
8957fn rust_lang() -> Arc<Language> {
8958 Arc::new(Language::new(
8959 LanguageConfig {
8960 name: "Rust".into(),
8961 matcher: LanguageMatcher {
8962 path_suffixes: vec!["rs".to_string()],
8963 ..Default::default()
8964 },
8965 ..Default::default()
8966 },
8967 Some(tree_sitter_rust::LANGUAGE.into()),
8968 ))
8969}
8970
8971fn typescript_lang() -> Arc<Language> {
8972 Arc::new(Language::new(
8973 LanguageConfig {
8974 name: "TypeScript".into(),
8975 matcher: LanguageMatcher {
8976 path_suffixes: vec!["ts".to_string()],
8977 ..Default::default()
8978 },
8979 ..Default::default()
8980 },
8981 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8982 ))
8983}
8984
8985fn tsx_lang() -> Arc<Language> {
8986 Arc::new(Language::new(
8987 LanguageConfig {
8988 name: "tsx".into(),
8989 matcher: LanguageMatcher {
8990 path_suffixes: vec!["tsx".to_string()],
8991 ..Default::default()
8992 },
8993 ..Default::default()
8994 },
8995 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8996 ))
8997}
8998
8999fn get_all_tasks(
9000 project: &Entity<Project>,
9001 task_contexts: Arc<TaskContexts>,
9002 cx: &mut App,
9003) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9004 let new_tasks = project.update(cx, |project, cx| {
9005 project.task_store.update(cx, |task_store, cx| {
9006 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9007 this.used_and_current_resolved_tasks(task_contexts, cx)
9008 })
9009 })
9010 });
9011
9012 cx.background_spawn(async move {
9013 let (mut old, new) = new_tasks.await;
9014 old.extend(new);
9015 old
9016 })
9017}
9018
9019#[track_caller]
9020fn assert_entry_git_state(
9021 tree: &Worktree,
9022 repository: &Repository,
9023 path: &str,
9024 index_status: Option<StatusCode>,
9025 is_ignored: bool,
9026) {
9027 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9028 let entry = tree
9029 .entry_for_path(path)
9030 .unwrap_or_else(|| panic!("entry {path} not found"));
9031 let status = repository
9032 .status_for_path(&path.into())
9033 .map(|entry| entry.status);
9034 let expected = index_status.map(|index_status| {
9035 TrackedStatus {
9036 index_status,
9037 worktree_status: StatusCode::Unmodified,
9038 }
9039 .into()
9040 });
9041 assert_eq!(
9042 status, expected,
9043 "expected {path} to have git status: {expected:?}"
9044 );
9045 assert_eq!(
9046 entry.is_ignored, is_ignored,
9047 "expected {path} to have is_ignored: {is_ignored}"
9048 );
9049}
9050
9051#[track_caller]
9052fn git_init(path: &Path) -> git2::Repository {
9053 let mut init_opts = RepositoryInitOptions::new();
9054 init_opts.initial_head("main");
9055 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9056}
9057
9058#[track_caller]
9059fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9060 let path = path.as_ref();
9061 let mut index = repo.index().expect("Failed to get index");
9062 index.add_path(path).expect("Failed to add file");
9063 index.write().expect("Failed to write index");
9064}
9065
9066#[track_caller]
9067fn git_remove_index(path: &Path, repo: &git2::Repository) {
9068 let mut index = repo.index().expect("Failed to get index");
9069 index.remove_path(path).expect("Failed to add file");
9070 index.write().expect("Failed to write index");
9071}
9072
9073#[track_caller]
9074fn git_commit(msg: &'static str, repo: &git2::Repository) {
9075 use git2::Signature;
9076
9077 let signature = Signature::now("test", "test@zed.dev").unwrap();
9078 let oid = repo.index().unwrap().write_tree().unwrap();
9079 let tree = repo.find_tree(oid).unwrap();
9080 if let Ok(head) = repo.head() {
9081 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9082
9083 let parent_commit = parent_obj.as_commit().unwrap();
9084
9085 repo.commit(
9086 Some("HEAD"),
9087 &signature,
9088 &signature,
9089 msg,
9090 &tree,
9091 &[parent_commit],
9092 )
9093 .expect("Failed to commit with parent");
9094 } else {
9095 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9096 .expect("Failed to commit");
9097 }
9098}
9099
// Cherry-picks `commit` onto the current HEAD, leaving conflicts in the
// working tree/index for the caller to inspect. Compiled out via the
// always-false `#[cfg(any())]`, together with the disabled test that uses it.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9105
9106#[track_caller]
9107fn git_stash(repo: &mut git2::Repository) {
9108 use git2::Signature;
9109
9110 let signature = Signature::now("test", "test@zed.dev").unwrap();
9111 repo.stash_save(&signature, "N/A", None)
9112 .expect("Failed to stash");
9113}
9114
9115#[track_caller]
9116fn git_reset(offset: usize, repo: &git2::Repository) {
9117 let head = repo.head().expect("Couldn't get repo head");
9118 let object = head.peel(git2::ObjectType::Commit).unwrap();
9119 let commit = object.as_commit().unwrap();
9120 let new_head = commit
9121 .parents()
9122 .inspect(|parnet| {
9123 parnet.message();
9124 })
9125 .nth(offset)
9126 .expect("Not enough history");
9127 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9128 .expect("Could not reset");
9129}
9130
// Creates branch `name` pointing at the current HEAD commit, without checking
// it out. Compiled out via the always-false `#[cfg(any())]`, together with the
// disabled test that uses it.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed: the expect message previously said "Failed to commit", a
    // copy-paste leftover — this call creates a branch.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9141
// Points HEAD at the given reference (e.g. "refs/heads/main") and checks out
// its tree into the working directory. Compiled out via the always-false
// `#[cfg(any())]`, together with the disabled test that uses it.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9148
// Snapshots the repository's status as a map from repo-relative path to its
// raw `git2::Status` flags. Compiled out via the always-false `#[cfg(any())]`,
// together with the disabled test that uses it.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}
9158
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two sibling worktrees so resolution must pick the right one per path.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root and id for the assertions below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A top-level file resolves to its worktree with a relative path.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        // Nested files keep their subdirectory in the relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        // Files in the second worktree resolve to its id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // A nonexistent path inside a worktree still resolves (creation target).
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}