1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
9 DiffHunkStatusKind, assert_hunks,
10};
11use fs::FakeFs;
12use futures::{StreamExt, future};
13use git::{
14 GitHostingProviderRegistry,
15 repository::RepoPath,
16 status::{StatusCode, TrackedStatus},
17};
18use git2::RepositoryInitOptions;
19use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
20use http_client::Url;
21use itertools::Itertools;
22use language::{
23 Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, DiskState, FakeLspAdapter,
24 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
25 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
26 tree_sitter_rust, tree_sitter_typescript,
27};
28use lsp::{
29 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
30 WillRenameFiles, notification::DidRenameFiles,
31};
32use parking_lot::Mutex;
33use paths::{config_dir, tasks_file};
34use postage::stream::Stream as _;
35use pretty_assertions::{assert_eq, assert_matches};
36use rand::{Rng as _, rngs::StdRng};
37use serde_json::json;
38#[cfg(not(windows))]
39use std::os;
40use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
41use task::{ResolvedTask, TaskContext};
42use unindent::Unindent as _;
43use util::{
44 TryFutureExt as _, assert_set_eq, maybe, path,
45 paths::PathMatcher,
46 test::{TempTree, marked_text_offsets},
47 uri,
48};
49use worktree::WorktreeModelHandle as _;
50
51#[gpui::test]
52async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
53 cx.executor().allow_parking();
54
55 let (tx, mut rx) = futures::channel::mpsc::unbounded();
56 let _thread = std::thread::spawn(move || {
57 #[cfg(not(target_os = "windows"))]
58 std::fs::metadata("/tmp").unwrap();
59 #[cfg(target_os = "windows")]
60 std::fs::metadata("C:/Windows").unwrap();
61 std::thread::sleep(Duration::from_millis(1000));
62 tx.unbounded_send(1).unwrap();
63 });
64 rx.next().await.unwrap();
65}
66
67#[gpui::test]
68async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
69 cx.executor().allow_parking();
70
71 let io_task = smol::unblock(move || {
72 println!("sleeping on thread {:?}", std::thread::current().id());
73 std::thread::sleep(Duration::from_millis(10));
74 1
75 });
76
77 let task = cx.foreground_executor().spawn(async move {
78 io_task.await;
79 });
80
81 task.await;
82}
83
84#[cfg(not(windows))]
85#[gpui::test]
86async fn test_symlinks(cx: &mut gpui::TestAppContext) {
87 init_test(cx);
88 cx.executor().allow_parking();
89
90 let dir = TempTree::new(json!({
91 "root": {
92 "apple": "",
93 "banana": {
94 "carrot": {
95 "date": "",
96 "endive": "",
97 }
98 },
99 "fennel": {
100 "grape": "",
101 }
102 }
103 }));
104
105 let root_link_path = dir.path().join("root_link");
106 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
107 os::unix::fs::symlink(
108 dir.path().join("root/fennel"),
109 dir.path().join("root/finnochio"),
110 )
111 .unwrap();
112
113 let project = Project::test(
114 Arc::new(RealFs::new(None, cx.executor())),
115 [root_link_path.as_ref()],
116 cx,
117 )
118 .await;
119
120 project.update(cx, |project, cx| {
121 let tree = project.worktrees(cx).next().unwrap().read(cx);
122 assert_eq!(tree.file_count(), 5);
123 assert_eq!(
124 tree.inode_for_path("fennel/grape"),
125 tree.inode_for_path("finnochio/grape")
126 );
127 });
128}
129
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture: a root .editorconfig (root = true) configuring *.rs and *.js,
    // plus .zed/settings.json with conflicting editor defaults that the
    // .editorconfig values are expected to override.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
            "tab_size": 8,
            "hard_tabs": false,
            "ensure_final_newline_on_save": false,
            "remove_trailing_whitespace_on_save": false,
            "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the on-disk tree into the fake FS so the project observes
    // the settings files.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let settings/editorconfig observation settle before querying.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set for *.js, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json is not matched by the .editorconfig glob "*.rs",
        // so it falls back to the .zed/settings tab_size of 8
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
219
220#[gpui::test]
221async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
222 init_test(cx);
223 cx.update(|cx| {
224 GitHostingProviderRegistry::default_global(cx);
225 git_hosting_providers::init(cx);
226 });
227
228 let fs = FakeFs::new(cx.executor());
229 let str_path = path!("/dir");
230 let path = Path::new(str_path);
231
232 fs.insert_tree(
233 path!("/dir"),
234 json!({
235 ".zed": {
236 "settings.json": r#"{
237 "git_hosting_providers": [
238 {
239 "provider": "gitlab",
240 "base_url": "https://google.com",
241 "name": "foo"
242 }
243 ]
244 }"#
245 },
246 }),
247 )
248 .await;
249
250 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
251 let (_worktree, _) =
252 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
253 cx.executor().run_until_parked();
254
255 cx.update(|cx| {
256 let provider = GitHostingProviderRegistry::global(cx);
257 assert!(
258 provider
259 .list_hosting_providers()
260 .into_iter()
261 .any(|provider| provider.name() == "foo")
262 );
263 });
264
265 fs.atomic_write(
266 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
267 "{}".into(),
268 )
269 .await
270 .unwrap();
271
272 cx.run_until_parked();
273
274 cx.update(|cx| {
275 let provider = GitHostingProviderRegistry::global(cx);
276 assert!(
277 !provider
278 .list_hosting_providers()
279 .into_iter()
280 .any(|provider| provider.name() == "foo")
281 );
282 });
283}
284
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Fixture: a root .zed directory (settings + tasks) and a nested b/.zed
    // directory that overrides both for files under b/.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let settings and task files be scanned before querying.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task resolution context scoped to the single worktree.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind of tasks defined in the root-level .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolution: nested b/.zed overrides the root .zed.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files contribute; the nested one sorts first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as most recently scheduled, and add a global
    // (file-based) task definition to the inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first; the global task
    // (with its env) appears last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
493
494#[gpui::test]
495async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
496 init_test(cx);
497 TaskStore::init(None);
498
499 let fs = FakeFs::new(cx.executor());
500 fs.insert_tree(
501 path!("/dir"),
502 json!({
503 ".zed": {
504 "tasks.json": r#"[{
505 "label": "test worktree root",
506 "command": "echo $ZED_WORKTREE_ROOT"
507 }]"#,
508 },
509 "a": {
510 "a.rs": "fn a() {\n A\n}"
511 },
512 }),
513 )
514 .await;
515
516 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
517 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
518
519 cx.executor().run_until_parked();
520 let worktree_id = cx.update(|cx| {
521 project.update(cx, |project, cx| {
522 project.worktrees(cx).next().unwrap().read(cx).id()
523 })
524 });
525
526 let active_non_worktree_item_tasks = cx
527 .update(|cx| {
528 get_all_tasks(
529 &project,
530 Arc::new(TaskContexts {
531 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
532 active_worktree_context: None,
533 other_worktree_contexts: Vec::new(),
534 lsp_task_sources: HashMap::default(),
535 latest_selection: None,
536 }),
537 cx,
538 )
539 })
540 .await;
541 assert!(
542 active_non_worktree_item_tasks.is_empty(),
543 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
544 );
545
546 let active_worktree_tasks = cx
547 .update(|cx| {
548 get_all_tasks(
549 &project,
550 Arc::new(TaskContexts {
551 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
552 active_worktree_context: Some((worktree_id, {
553 let mut worktree_context = TaskContext::default();
554 worktree_context
555 .task_variables
556 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
557 worktree_context
558 })),
559 other_worktree_contexts: Vec::new(),
560 lsp_task_sources: HashMap::default(),
561 latest_selection: None,
562 }),
563 cx,
564 )
565 })
566 .await;
567 assert_eq!(
568 active_worktree_tasks
569 .into_iter()
570 .map(|(source_kind, task)| {
571 let resolved = task.resolved;
572 (source_kind, resolved.command.unwrap())
573 })
574 .collect::<Vec<_>>(),
575 vec![(
576 TaskSourceKind::Worktree {
577 id: worktree_id,
578 directory_in_worktree: PathBuf::from(path!(".zed")),
579 id_base: if cfg!(windows) {
580 "local worktree tasks from directory \".zed\"".into()
581 } else {
582 "local worktree tasks from directory \".zed\"".into()
583 },
584 },
585 "echo /dir".to_string(),
586 )]
587 );
588}
589
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture: two Rust files, a TOML file with no language server, and a
    // JSON file served by a second fake language server.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server: completion triggers "." and "::", save notifications on.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server: completion trigger ":", save notifications on.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    // Same extension => close + reopen on the same (Rust) server.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared after the language
    // changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is unspecified).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
992
993#[gpui::test]
994async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
995 init_test(cx);
996
997 let fs = FakeFs::new(cx.executor());
998 fs.insert_tree(
999 path!("/the-root"),
1000 json!({
1001 ".gitignore": "target\n",
1002 "Cargo.lock": "",
1003 "src": {
1004 "a.rs": "",
1005 "b.rs": "",
1006 },
1007 "target": {
1008 "x": {
1009 "out": {
1010 "x.rs": ""
1011 }
1012 },
1013 "y": {
1014 "out": {
1015 "y.rs": "",
1016 }
1017 },
1018 "z": {
1019 "out": {
1020 "z.rs": ""
1021 }
1022 }
1023 }
1024 }),
1025 )
1026 .await;
1027 fs.insert_tree(
1028 path!("/the-registry"),
1029 json!({
1030 "dep1": {
1031 "src": {
1032 "dep1.rs": "",
1033 }
1034 },
1035 "dep2": {
1036 "src": {
1037 "dep2.rs": "",
1038 }
1039 },
1040 }),
1041 )
1042 .await;
1043 fs.insert_tree(
1044 path!("/the/stdlib"),
1045 json!({
1046 "LICENSE": "",
1047 "src": {
1048 "string.rs": "",
1049 }
1050 }),
1051 )
1052 .await;
1053
1054 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1055 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1056 (project.languages().clone(), project.lsp_store())
1057 });
1058 language_registry.add(rust_lang());
1059 let mut fake_servers = language_registry.register_fake_lsp(
1060 "Rust",
1061 FakeLspAdapter {
1062 name: "the-language-server",
1063 ..Default::default()
1064 },
1065 );
1066
1067 cx.executor().run_until_parked();
1068
1069 // Start the language server by opening a buffer with a compatible file extension.
1070 project
1071 .update(cx, |project, cx| {
1072 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1073 })
1074 .await
1075 .unwrap();
1076
1077 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1078 project.update(cx, |project, cx| {
1079 let worktree = project.worktrees(cx).next().unwrap();
1080 assert_eq!(
1081 worktree
1082 .read(cx)
1083 .snapshot()
1084 .entries(true, 0)
1085 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1086 .collect::<Vec<_>>(),
1087 &[
1088 (Path::new(""), false),
1089 (Path::new(".gitignore"), false),
1090 (Path::new("Cargo.lock"), false),
1091 (Path::new("src"), false),
1092 (Path::new("src/a.rs"), false),
1093 (Path::new("src/b.rs"), false),
1094 (Path::new("target"), true),
1095 ]
1096 );
1097 });
1098
1099 let prev_read_dir_count = fs.read_dir_call_count();
1100
1101 let fake_server = fake_servers.next().await.unwrap();
1102 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1103 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1104 id
1105 });
1106
1107 // Simulate jumping to a definition in a dependency outside of the worktree.
1108 let _out_of_worktree_buffer = project
1109 .update(cx, |project, cx| {
1110 project.open_local_buffer_via_lsp(
1111 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1112 server_id,
1113 cx,
1114 )
1115 })
1116 .await
1117 .unwrap();
1118
1119 // Keep track of the FS events reported to the language server.
1120 let file_changes = Arc::new(Mutex::new(Vec::new()));
1121 fake_server
1122 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1123 registrations: vec![lsp::Registration {
1124 id: Default::default(),
1125 method: "workspace/didChangeWatchedFiles".to_string(),
1126 register_options: serde_json::to_value(
1127 lsp::DidChangeWatchedFilesRegistrationOptions {
1128 watchers: vec![
1129 lsp::FileSystemWatcher {
1130 glob_pattern: lsp::GlobPattern::String(
1131 path!("/the-root/Cargo.toml").to_string(),
1132 ),
1133 kind: None,
1134 },
1135 lsp::FileSystemWatcher {
1136 glob_pattern: lsp::GlobPattern::String(
1137 path!("/the-root/src/*.{rs,c}").to_string(),
1138 ),
1139 kind: None,
1140 },
1141 lsp::FileSystemWatcher {
1142 glob_pattern: lsp::GlobPattern::String(
1143 path!("/the-root/target/y/**/*.rs").to_string(),
1144 ),
1145 kind: None,
1146 },
1147 lsp::FileSystemWatcher {
1148 glob_pattern: lsp::GlobPattern::String(
1149 path!("/the/stdlib/src/**/*.rs").to_string(),
1150 ),
1151 kind: None,
1152 },
1153 lsp::FileSystemWatcher {
1154 glob_pattern: lsp::GlobPattern::String(
1155 path!("**/Cargo.lock").to_string(),
1156 ),
1157 kind: None,
1158 },
1159 ],
1160 },
1161 )
1162 .ok(),
1163 }],
1164 })
1165 .await
1166 .into_response()
1167 .unwrap();
1168 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1169 let file_changes = file_changes.clone();
1170 move |params, _| {
1171 let mut file_changes = file_changes.lock();
1172 file_changes.extend(params.changes);
1173 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1174 }
1175 });
1176
1177 cx.executor().run_until_parked();
1178 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1179 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1180
1181 let mut new_watched_paths = fs.watched_paths();
1182 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1183 assert_eq!(
1184 &new_watched_paths,
1185 &[
1186 Path::new(path!("/the-root")),
1187 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1188 Path::new(path!("/the/stdlib/src"))
1189 ]
1190 );
1191
1192 // Now the language server has asked us to watch an ignored directory path,
1193 // so we recursively load it.
1194 project.update(cx, |project, cx| {
1195 let worktree = project.visible_worktrees(cx).next().unwrap();
1196 assert_eq!(
1197 worktree
1198 .read(cx)
1199 .snapshot()
1200 .entries(true, 0)
1201 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1202 .collect::<Vec<_>>(),
1203 &[
1204 (Path::new(""), false),
1205 (Path::new(".gitignore"), false),
1206 (Path::new("Cargo.lock"), false),
1207 (Path::new("src"), false),
1208 (Path::new("src/a.rs"), false),
1209 (Path::new("src/b.rs"), false),
1210 (Path::new("target"), true),
1211 (Path::new("target/x"), true),
1212 (Path::new("target/y"), true),
1213 (Path::new("target/y/out"), true),
1214 (Path::new("target/y/out/y.rs"), true),
1215 (Path::new("target/z"), true),
1216 ]
1217 );
1218 });
1219
1220 // Perform some file system mutations, two of which match the watched patterns,
1221 // and one of which does not.
1222 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1223 .await
1224 .unwrap();
1225 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1226 .await
1227 .unwrap();
1228 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1229 .await
1230 .unwrap();
1231 fs.create_file(
1232 path!("/the-root/target/x/out/x2.rs").as_ref(),
1233 Default::default(),
1234 )
1235 .await
1236 .unwrap();
1237 fs.create_file(
1238 path!("/the-root/target/y/out/y2.rs").as_ref(),
1239 Default::default(),
1240 )
1241 .await
1242 .unwrap();
1243 fs.save(
1244 path!("/the-root/Cargo.lock").as_ref(),
1245 &"".into(),
1246 Default::default(),
1247 )
1248 .await
1249 .unwrap();
1250 fs.save(
1251 path!("/the-stdlib/LICENSE").as_ref(),
1252 &"".into(),
1253 Default::default(),
1254 )
1255 .await
1256 .unwrap();
1257 fs.save(
1258 path!("/the/stdlib/src/string.rs").as_ref(),
1259 &"".into(),
1260 Default::default(),
1261 )
1262 .await
1263 .unwrap();
1264
1265 // The language server receives events for the FS mutations that match its watch patterns.
1266 cx.executor().run_until_parked();
1267 assert_eq!(
1268 &*file_changes.lock(),
1269 &[
1270 lsp::FileEvent {
1271 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1272 typ: lsp::FileChangeType::CHANGED,
1273 },
1274 lsp::FileEvent {
1275 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1276 typ: lsp::FileChangeType::DELETED,
1277 },
1278 lsp::FileEvent {
1279 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1280 typ: lsp::FileChangeType::CREATED,
1281 },
1282 lsp::FileEvent {
1283 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1284 typ: lsp::FileChangeType::CREATED,
1285 },
1286 lsp::FileEvent {
1287 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1288 typ: lsp::FileChangeType::CHANGED,
1289 },
1290 ]
1291 );
1292}
1293
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics pushed for buffers living in *single-file*
    // worktrees (each file opened as its own worktree root) are routed to the
    // correct buffer and rendered with the correct severity.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open each file as its own single-file worktree.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Push one diagnostic per file, both attributed to the same server id, so
    // each single-file worktree must receive only its own entry.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
    });

    // `a.rs` carries the ERROR over the identifier `a` only.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    // `b.rs` carries the WARNING over the identifier `b` only.
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1399
1400#[gpui::test]
1401async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1402 init_test(cx);
1403
1404 let fs = FakeFs::new(cx.executor());
1405 fs.insert_tree(
1406 path!("/root"),
1407 json!({
1408 "dir": {
1409 ".git": {
1410 "HEAD": "ref: refs/heads/main",
1411 },
1412 ".gitignore": "b.rs",
1413 "a.rs": "let a = 1;",
1414 "b.rs": "let b = 2;",
1415 },
1416 "other.rs": "let b = c;"
1417 }),
1418 )
1419 .await;
1420
1421 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1422 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1423 let (worktree, _) = project
1424 .update(cx, |project, cx| {
1425 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1426 })
1427 .await
1428 .unwrap();
1429 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1430
1431 let (worktree, _) = project
1432 .update(cx, |project, cx| {
1433 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1434 })
1435 .await
1436 .unwrap();
1437 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1438
1439 let server_id = LanguageServerId(0);
1440 lsp_store.update(cx, |lsp_store, cx| {
1441 lsp_store
1442 .update_diagnostics(
1443 server_id,
1444 lsp::PublishDiagnosticsParams {
1445 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1446 version: None,
1447 diagnostics: vec![lsp::Diagnostic {
1448 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1449 severity: Some(lsp::DiagnosticSeverity::ERROR),
1450 message: "unused variable 'b'".to_string(),
1451 ..Default::default()
1452 }],
1453 },
1454 None,
1455 DiagnosticSourceKind::Pushed,
1456 &[],
1457 cx,
1458 )
1459 .unwrap();
1460 lsp_store
1461 .update_diagnostics(
1462 server_id,
1463 lsp::PublishDiagnosticsParams {
1464 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1465 version: None,
1466 diagnostics: vec![lsp::Diagnostic {
1467 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1468 severity: Some(lsp::DiagnosticSeverity::ERROR),
1469 message: "unknown variable 'c'".to_string(),
1470 ..Default::default()
1471 }],
1472 },
1473 None,
1474 DiagnosticSourceKind::Pushed,
1475 &[],
1476 cx,
1477 )
1478 .unwrap();
1479 });
1480
1481 let main_ignored_buffer = project
1482 .update(cx, |project, cx| {
1483 project.open_buffer((main_worktree_id, "b.rs"), cx)
1484 })
1485 .await
1486 .unwrap();
1487 main_ignored_buffer.update(cx, |buffer, _| {
1488 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1489 assert_eq!(
1490 chunks
1491 .iter()
1492 .map(|(s, d)| (s.as_str(), *d))
1493 .collect::<Vec<_>>(),
1494 &[
1495 ("let ", None),
1496 ("b", Some(DiagnosticSeverity::ERROR)),
1497 (" = 2;", None),
1498 ],
1499 "Gigitnored buffers should still get in-buffer diagnostics",
1500 );
1501 });
1502 let other_buffer = project
1503 .update(cx, |project, cx| {
1504 project.open_buffer((other_worktree_id, ""), cx)
1505 })
1506 .await
1507 .unwrap();
1508 other_buffer.update(cx, |buffer, _| {
1509 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1510 assert_eq!(
1511 chunks
1512 .iter()
1513 .map(|(s, d)| (s.as_str(), *d))
1514 .collect::<Vec<_>>(),
1515 &[
1516 ("let b = ", None),
1517 ("c", Some(DiagnosticSeverity::ERROR)),
1518 (";", None),
1519 ],
1520 "Buffers from hidden projects should still get in-buffer diagnostics"
1521 );
1522 });
1523
1524 project.update(cx, |project, cx| {
1525 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1526 assert_eq!(
1527 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1528 vec![(
1529 ProjectPath {
1530 worktree_id: main_worktree_id,
1531 path: Arc::from(Path::new("b.rs")),
1532 },
1533 server_id,
1534 DiagnosticSummary {
1535 error_count: 1,
1536 warning_count: 0,
1537 }
1538 )]
1539 );
1540 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1541 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1542 });
1543}
1544
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies the event sequence emitted while a language server runs
    // disk-based diagnostics under its registered progress token:
    // Started -> DiagnosticsUpdated -> Finished, and that publishing the same
    // (empty) diagnostics twice only emits a single update event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress under the disk-based token signals that disk-based
    // diagnostics began (preceded by an inlay-hint refresh event).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for a.rs produces a DiagnosticsUpdated event
    // scoped to that path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, Path::new("a.rs")).into()],
        }
    );

    // Ending the registered progress token signals completion.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // The published diagnostic is present in the buffer, marked as Pushed.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, Path::new("a.rs")).into()],
        }
    );

    // Second identical (empty) publish: no further event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1681
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server while its disk-based
    // diagnostics are still in progress does not leave the project stuck in a
    // "diagnosing" state: the old server's unfinished progress is discarded
    // and only the new server's progress is tracked.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The old server (id 0) is removed, and the replacement gets a fresh id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the new server instance.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1781
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server clears the diagnostics it
    // had previously published, both in the buffer and in the project-level
    // summary.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is reflected in the buffer and the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1862
1863#[gpui::test]
1864async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1865 init_test(cx);
1866
1867 let fs = FakeFs::new(cx.executor());
1868 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1869
1870 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1871 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1872
1873 language_registry.add(rust_lang());
1874 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1875
1876 let (buffer, _handle) = project
1877 .update(cx, |project, cx| {
1878 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1879 })
1880 .await
1881 .unwrap();
1882
1883 // Before restarting the server, report diagnostics with an unknown buffer version.
1884 let fake_server = fake_servers.next().await.unwrap();
1885 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1886 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1887 version: Some(10000),
1888 diagnostics: Vec::new(),
1889 });
1890 cx.executor().run_until_parked();
1891 project.update(cx, |project, cx| {
1892 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
1893 });
1894
1895 let mut fake_server = fake_servers.next().await.unwrap();
1896 let notification = fake_server
1897 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1898 .await
1899 .text_document;
1900 assert_eq!(notification.version, 0);
1901}
1902
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Verifies that cancelling language-server work for a buffer sends a
    // WorkDoneProgressCancel only for tokens whose progress was registered as
    // cancellable.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // One non-cancellable work item...
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // ...and one cancellable work item.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token should receive a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1967
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Verifies that toggling `enable_language_server` per language in the
    // settings stops/starts only the matching server: disabling Rust leaves
    // the JavaScript server running, and re-enabling Rust restarts it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.0.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server exits in response to the settings change.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.0.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.0.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance starts and re-opens the buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2085
2086#[gpui::test(iterations = 3)]
2087async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
2088 init_test(cx);
2089
2090 let text = "
2091 fn a() { A }
2092 fn b() { BB }
2093 fn c() { CCC }
2094 "
2095 .unindent();
2096
2097 let fs = FakeFs::new(cx.executor());
2098 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2099
2100 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2101 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2102
2103 language_registry.add(rust_lang());
2104 let mut fake_servers = language_registry.register_fake_lsp(
2105 "Rust",
2106 FakeLspAdapter {
2107 disk_based_diagnostics_sources: vec!["disk".into()],
2108 ..Default::default()
2109 },
2110 );
2111
2112 let buffer = project
2113 .update(cx, |project, cx| {
2114 project.open_local_buffer(path!("/dir/a.rs"), cx)
2115 })
2116 .await
2117 .unwrap();
2118
2119 let _handle = project.update(cx, |project, cx| {
2120 project.register_buffer_with_language_servers(&buffer, cx)
2121 });
2122
2123 let mut fake_server = fake_servers.next().await.unwrap();
2124 let open_notification = fake_server
2125 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2126 .await;
2127
2128 // Edit the buffer, moving the content down
2129 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2130 let change_notification_1 = fake_server
2131 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2132 .await;
2133 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2134
2135 // Report some diagnostics for the initial version of the buffer
2136 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2137 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2138 version: Some(open_notification.text_document.version),
2139 diagnostics: vec![
2140 lsp::Diagnostic {
2141 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2142 severity: Some(DiagnosticSeverity::ERROR),
2143 message: "undefined variable 'A'".to_string(),
2144 source: Some("disk".to_string()),
2145 ..Default::default()
2146 },
2147 lsp::Diagnostic {
2148 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2149 severity: Some(DiagnosticSeverity::ERROR),
2150 message: "undefined variable 'BB'".to_string(),
2151 source: Some("disk".to_string()),
2152 ..Default::default()
2153 },
2154 lsp::Diagnostic {
2155 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2156 severity: Some(DiagnosticSeverity::ERROR),
2157 source: Some("disk".to_string()),
2158 message: "undefined variable 'CCC'".to_string(),
2159 ..Default::default()
2160 },
2161 ],
2162 });
2163
2164 // The diagnostics have moved down since they were created.
2165 cx.executor().run_until_parked();
2166 buffer.update(cx, |buffer, _| {
2167 assert_eq!(
2168 buffer
2169 .snapshot()
2170 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2171 .collect::<Vec<_>>(),
2172 &[
2173 DiagnosticEntry {
2174 range: Point::new(3, 9)..Point::new(3, 11),
2175 diagnostic: Diagnostic {
2176 source: Some("disk".into()),
2177 severity: DiagnosticSeverity::ERROR,
2178 message: "undefined variable 'BB'".to_string(),
2179 is_disk_based: true,
2180 group_id: 1,
2181 is_primary: true,
2182 source_kind: DiagnosticSourceKind::Pushed,
2183 ..Diagnostic::default()
2184 },
2185 },
2186 DiagnosticEntry {
2187 range: Point::new(4, 9)..Point::new(4, 12),
2188 diagnostic: Diagnostic {
2189 source: Some("disk".into()),
2190 severity: DiagnosticSeverity::ERROR,
2191 message: "undefined variable 'CCC'".to_string(),
2192 is_disk_based: true,
2193 group_id: 2,
2194 is_primary: true,
2195 source_kind: DiagnosticSourceKind::Pushed,
2196 ..Diagnostic::default()
2197 }
2198 }
2199 ]
2200 );
2201 assert_eq!(
2202 chunks_with_diagnostics(buffer, 0..buffer.len()),
2203 [
2204 ("\n\nfn a() { ".to_string(), None),
2205 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2206 (" }\nfn b() { ".to_string(), None),
2207 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2208 (" }\nfn c() { ".to_string(), None),
2209 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2210 (" }\n".to_string(), None),
2211 ]
2212 );
2213 assert_eq!(
2214 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2215 [
2216 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2217 (" }\nfn c() { ".to_string(), None),
2218 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2219 ]
2220 );
2221 });
2222
2223 // Ensure overlapping diagnostics are highlighted correctly.
2224 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2225 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2226 version: Some(open_notification.text_document.version),
2227 diagnostics: vec![
2228 lsp::Diagnostic {
2229 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2230 severity: Some(DiagnosticSeverity::ERROR),
2231 message: "undefined variable 'A'".to_string(),
2232 source: Some("disk".to_string()),
2233 ..Default::default()
2234 },
2235 lsp::Diagnostic {
2236 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2237 severity: Some(DiagnosticSeverity::WARNING),
2238 message: "unreachable statement".to_string(),
2239 source: Some("disk".to_string()),
2240 ..Default::default()
2241 },
2242 ],
2243 });
2244
2245 cx.executor().run_until_parked();
2246 buffer.update(cx, |buffer, _| {
2247 assert_eq!(
2248 buffer
2249 .snapshot()
2250 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2251 .collect::<Vec<_>>(),
2252 &[
2253 DiagnosticEntry {
2254 range: Point::new(2, 9)..Point::new(2, 12),
2255 diagnostic: Diagnostic {
2256 source: Some("disk".into()),
2257 severity: DiagnosticSeverity::WARNING,
2258 message: "unreachable statement".to_string(),
2259 is_disk_based: true,
2260 group_id: 4,
2261 is_primary: true,
2262 source_kind: DiagnosticSourceKind::Pushed,
2263 ..Diagnostic::default()
2264 }
2265 },
2266 DiagnosticEntry {
2267 range: Point::new(2, 9)..Point::new(2, 10),
2268 diagnostic: Diagnostic {
2269 source: Some("disk".into()),
2270 severity: DiagnosticSeverity::ERROR,
2271 message: "undefined variable 'A'".to_string(),
2272 is_disk_based: true,
2273 group_id: 3,
2274 is_primary: true,
2275 source_kind: DiagnosticSourceKind::Pushed,
2276 ..Diagnostic::default()
2277 },
2278 }
2279 ]
2280 );
2281 assert_eq!(
2282 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2283 [
2284 ("fn a() { ".to_string(), None),
2285 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2286 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2287 ("\n".to_string(), None),
2288 ]
2289 );
2290 assert_eq!(
2291 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2292 [
2293 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2294 ("\n".to_string(), None),
2295 ]
2296 );
2297 });
2298
2299 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2300 // changes since the last save.
2301 buffer.update(cx, |buffer, cx| {
2302 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2303 buffer.edit(
2304 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2305 None,
2306 cx,
2307 );
2308 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2309 });
2310 let change_notification_2 = fake_server
2311 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2312 .await;
2313 assert!(
2314 change_notification_2.text_document.version > change_notification_1.text_document.version
2315 );
2316
2317 // Handle out-of-order diagnostics
2318 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2319 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2320 version: Some(change_notification_2.text_document.version),
2321 diagnostics: vec![
2322 lsp::Diagnostic {
2323 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2324 severity: Some(DiagnosticSeverity::ERROR),
2325 message: "undefined variable 'BB'".to_string(),
2326 source: Some("disk".to_string()),
2327 ..Default::default()
2328 },
2329 lsp::Diagnostic {
2330 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2331 severity: Some(DiagnosticSeverity::WARNING),
2332 message: "undefined variable 'A'".to_string(),
2333 source: Some("disk".to_string()),
2334 ..Default::default()
2335 },
2336 ],
2337 });
2338
2339 cx.executor().run_until_parked();
2340 buffer.update(cx, |buffer, _| {
2341 assert_eq!(
2342 buffer
2343 .snapshot()
2344 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2345 .collect::<Vec<_>>(),
2346 &[
2347 DiagnosticEntry {
2348 range: Point::new(2, 21)..Point::new(2, 22),
2349 diagnostic: Diagnostic {
2350 source: Some("disk".into()),
2351 severity: DiagnosticSeverity::WARNING,
2352 message: "undefined variable 'A'".to_string(),
2353 is_disk_based: true,
2354 group_id: 6,
2355 is_primary: true,
2356 source_kind: DiagnosticSourceKind::Pushed,
2357 ..Diagnostic::default()
2358 }
2359 },
2360 DiagnosticEntry {
2361 range: Point::new(3, 9)..Point::new(3, 14),
2362 diagnostic: Diagnostic {
2363 source: Some("disk".into()),
2364 severity: DiagnosticSeverity::ERROR,
2365 message: "undefined variable 'BB'".to_string(),
2366 is_disk_based: true,
2367 group_id: 5,
2368 is_primary: true,
2369 source_kind: DiagnosticSourceKind::Pushed,
2370 ..Diagnostic::default()
2371 },
2372 }
2373 ]
2374 );
2375 });
2376}
2377
/// Verifies rendering of zero-width diagnostic ranges: an empty range is
/// expanded forward to cover the following character, except at the end of a
/// line, where it is expanded backward to cover the preceding character.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Push two zero-width diagnostics straight into the LSP store: one in the
    // middle of line 0 (before the `;`), one at the end of line 1.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2453
2454#[gpui::test]
2455async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2456 init_test(cx);
2457
2458 let fs = FakeFs::new(cx.executor());
2459 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2460 .await;
2461
2462 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2463 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2464
2465 lsp_store.update(cx, |lsp_store, cx| {
2466 lsp_store
2467 .update_diagnostic_entries(
2468 LanguageServerId(0),
2469 Path::new("/dir/a.rs").to_owned(),
2470 None,
2471 None,
2472 vec![DiagnosticEntry {
2473 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2474 diagnostic: Diagnostic {
2475 severity: DiagnosticSeverity::ERROR,
2476 is_primary: true,
2477 message: "syntax error a1".to_string(),
2478 source_kind: DiagnosticSourceKind::Pushed,
2479 ..Diagnostic::default()
2480 },
2481 }],
2482 cx,
2483 )
2484 .unwrap();
2485 lsp_store
2486 .update_diagnostic_entries(
2487 LanguageServerId(1),
2488 Path::new("/dir/a.rs").to_owned(),
2489 None,
2490 None,
2491 vec![DiagnosticEntry {
2492 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2493 diagnostic: Diagnostic {
2494 severity: DiagnosticSeverity::ERROR,
2495 is_primary: true,
2496 message: "syntax error b1".to_string(),
2497 source_kind: DiagnosticSourceKind::Pushed,
2498 ..Diagnostic::default()
2499 },
2500 }],
2501 cx,
2502 )
2503 .unwrap();
2504
2505 assert_eq!(
2506 lsp_store.diagnostic_summary(false, cx),
2507 DiagnosticSummary {
2508 error_count: 2,
2509 warning_count: 0,
2510 }
2511 );
2512 });
2513}
2514
/// Applies server edits computed against a *past* version of the buffer: the
/// buffer is edited after `DidOpen`, then `edits_from_lsp` is given the old
/// document version, so the server's positions must be translated through
/// the intervening buffer changes before being applied.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    // Capture the version the server saw at open time; the edits below are
    // expressed relative to this snapshot of the document.
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Resolve the stale edits against the current buffer, passing the old
    // document version so the positions get remapped.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the user's interleaved
    // comments while landing the server's changes in the right places.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2669
/// A small change delivered by the server as a very large diff (rust-analyzer
/// does this for its merge-imports code action) should be minimized by
/// `edits_from_lsp` down to the two small edits that actually differ.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large diff collapses to exactly two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2780
/// An insertion at the same position *after* a replacement violates the LSP
/// spec's ordering rules, but real servers emit it; both edits must still be
/// applied (the insertion lands before the replaced text).
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
2836
/// `edits_from_lsp` must tolerate malformed server edits — out-of-order
/// entries, inverted ranges (end before start), and positions past the end
/// of the file — normalizing them into a minimal sorted set of buffer edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) precedes start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extending far past the end of the file (line 99).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The malformed edit list is normalized to two well-formed edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2943
2944fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2945 buffer: &Buffer,
2946 range: Range<T>,
2947) -> Vec<(String, Option<DiagnosticSeverity>)> {
2948 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2949 for chunk in buffer.snapshot().chunks(range, true) {
2950 if chunks
2951 .last()
2952 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
2953 {
2954 chunks.last_mut().unwrap().0.push_str(chunk.text);
2955 } else {
2956 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2957 }
2958 }
2959 chunks
2960}
2961
/// Go-to-definition resolving to a file outside the project: the target file
/// gets loaded into a new, non-visible worktree, which is released again once
/// the definition handle (and thus the target buffer) is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs exists on disk but is not opened.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Answer the definition request with a location in a.rs — a file outside
    // the project's visible worktree.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs now lives in an additional, non-visible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition released the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3059
/// When a completion item carries a `text_edit`, its range and new text take
/// precedence over both `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the completion request first; the handler below serves it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The text_edit's text and range win over insert_text / label.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3142
/// Completion lists may provide a default `edit_range` via `item_defaults`.
/// Items without their own `text_edit` must use that range, taking their new
/// text from `insert_text` when present, otherwise from `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text supplies the new text; the default edit_range is used.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no insert_text either, the label becomes the new text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3278
/// When neither a `text_edit` nor a default `edit_range` is provided, the
/// replace range is inferred from the buffer around the completion position
/// (here, the 3-character `fqn` / `cmp` tokens preceding the cursor).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // insert_text wins over the label; range covers the trailing "fqn".
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Completion is requested just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is used as new text; range covers "cmp" inside the string.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3384
/// Carriage returns in a server-provided `insert_text` (both bare "\r" and
/// "\r\n") are normalized to "\n" in the resulting completion's new text.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with insert_text containing both "\r" and "\r\n" line endings.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3452
// End-to-end test of a code action whose resolved form carries a command
// instead of edits: the action is resolved, its command executed on the
// server, and the server's resulting `workspace/applyEdit` request is
// captured as a project transaction applied to the buffer.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // Advertise lazy code-action resolution plus the command that
                // the resolved action will reference.
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                // A second, unrelated action verifies we apply only the first.
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            // Only the action that carried `data` gets a command attached.
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3594
3595#[gpui::test(iterations = 10)]
3596async fn test_save_file(cx: &mut gpui::TestAppContext) {
3597 init_test(cx);
3598
3599 let fs = FakeFs::new(cx.executor());
3600 fs.insert_tree(
3601 path!("/dir"),
3602 json!({
3603 "file1": "the old contents",
3604 }),
3605 )
3606 .await;
3607
3608 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3609 let buffer = project
3610 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3611 .await
3612 .unwrap();
3613 buffer.update(cx, |buffer, cx| {
3614 assert_eq!(buffer.text(), "the old contents");
3615 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3616 });
3617
3618 project
3619 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3620 .await
3621 .unwrap();
3622
3623 let new_text = fs
3624 .load(Path::new(path!("/dir/file1")))
3625 .await
3626 .unwrap()
3627 .replace("\r\n", "\n");
3628 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3629}
3630
// Regression test: saving an untitled buffer under a path that matches a
// registered language must spawn that language's server and open the
// buffer in it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    // Start with an empty worktree: no Rust file exists yet, so no Rust
    // language server should be running.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer has no language yet, so registering it with the
    // language servers starts nothing.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as a `.rs` file inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: Arc::from("file.rs".as_ref()),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer reports an attached language server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
3710
3711#[gpui::test(iterations = 30)]
3712async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3713 init_test(cx);
3714
3715 let fs = FakeFs::new(cx.executor().clone());
3716 fs.insert_tree(
3717 path!("/dir"),
3718 json!({
3719 "file1": "the original contents",
3720 }),
3721 )
3722 .await;
3723
3724 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3725 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3726 let buffer = project
3727 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3728 .await
3729 .unwrap();
3730
3731 // Simulate buffer diffs being slow, so that they don't complete before
3732 // the next file change occurs.
3733 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3734
3735 // Change the buffer's file on disk, and then wait for the file change
3736 // to be detected by the worktree, so that the buffer starts reloading.
3737 fs.save(
3738 path!("/dir/file1").as_ref(),
3739 &"the first contents".into(),
3740 Default::default(),
3741 )
3742 .await
3743 .unwrap();
3744 worktree.next_event(cx).await;
3745
3746 // Change the buffer's file again. Depending on the random seed, the
3747 // previous file change may still be in progress.
3748 fs.save(
3749 path!("/dir/file1").as_ref(),
3750 &"the second contents".into(),
3751 Default::default(),
3752 )
3753 .await
3754 .unwrap();
3755 worktree.next_event(cx).await;
3756
3757 cx.executor().run_until_parked();
3758 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3759 buffer.read_with(cx, |buffer, _| {
3760 assert_eq!(buffer.text(), on_disk_text);
3761 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3762 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3763 });
3764}
3765
3766#[gpui::test(iterations = 30)]
3767async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3768 init_test(cx);
3769
3770 let fs = FakeFs::new(cx.executor().clone());
3771 fs.insert_tree(
3772 path!("/dir"),
3773 json!({
3774 "file1": "the original contents",
3775 }),
3776 )
3777 .await;
3778
3779 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3780 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3781 let buffer = project
3782 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3783 .await
3784 .unwrap();
3785
3786 // Simulate buffer diffs being slow, so that they don't complete before
3787 // the next file change occurs.
3788 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3789
3790 // Change the buffer's file on disk, and then wait for the file change
3791 // to be detected by the worktree, so that the buffer starts reloading.
3792 fs.save(
3793 path!("/dir/file1").as_ref(),
3794 &"the first contents".into(),
3795 Default::default(),
3796 )
3797 .await
3798 .unwrap();
3799 worktree.next_event(cx).await;
3800
3801 cx.executor()
3802 .spawn(cx.executor().simulate_random_delay())
3803 .await;
3804
3805 // Perform a noop edit, causing the buffer's version to increase.
3806 buffer.update(cx, |buffer, cx| {
3807 buffer.edit([(0..0, " ")], None, cx);
3808 buffer.undo(cx);
3809 });
3810
3811 cx.executor().run_until_parked();
3812 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3813 buffer.read_with(cx, |buffer, _| {
3814 let buffer_text = buffer.text();
3815 if buffer_text == on_disk_text {
3816 assert!(
3817 !buffer.is_dirty() && !buffer.has_conflict(),
3818 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3819 );
3820 }
3821 // If the file change occurred while the buffer was processing the first
3822 // change, the buffer will be in a conflicting state.
3823 else {
3824 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3825 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3826 }
3827 });
3828}
3829
3830#[gpui::test]
3831async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3832 init_test(cx);
3833
3834 let fs = FakeFs::new(cx.executor());
3835 fs.insert_tree(
3836 path!("/dir"),
3837 json!({
3838 "file1": "the old contents",
3839 }),
3840 )
3841 .await;
3842
3843 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3844 let buffer = project
3845 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3846 .await
3847 .unwrap();
3848 buffer.update(cx, |buffer, cx| {
3849 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3850 });
3851
3852 project
3853 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3854 .await
3855 .unwrap();
3856
3857 let new_text = fs
3858 .load(Path::new(path!("/dir/file1")))
3859 .await
3860 .unwrap()
3861 .replace("\r\n", "\n");
3862 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3863}
3864
3865#[gpui::test]
3866async fn test_save_as(cx: &mut gpui::TestAppContext) {
3867 init_test(cx);
3868
3869 let fs = FakeFs::new(cx.executor());
3870 fs.insert_tree("/dir", json!({})).await;
3871
3872 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3873
3874 let languages = project.update(cx, |project, _| project.languages().clone());
3875 languages.add(rust_lang());
3876
3877 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3878 buffer.update(cx, |buffer, cx| {
3879 buffer.edit([(0..0, "abc")], None, cx);
3880 assert!(buffer.is_dirty());
3881 assert!(!buffer.has_conflict());
3882 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3883 });
3884 project
3885 .update(cx, |project, cx| {
3886 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3887 let path = ProjectPath {
3888 worktree_id,
3889 path: Arc::from(Path::new("file1.rs")),
3890 };
3891 project.save_buffer_as(buffer.clone(), path, cx)
3892 })
3893 .await
3894 .unwrap();
3895 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3896
3897 cx.executor().run_until_parked();
3898 buffer.update(cx, |buffer, cx| {
3899 assert_eq!(
3900 buffer.file().unwrap().full_path(cx),
3901 Path::new("dir/file1.rs")
3902 );
3903 assert!(!buffer.is_dirty());
3904 assert!(!buffer.has_conflict());
3905 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3906 });
3907
3908 let opened_buffer = project
3909 .update(cx, |project, cx| {
3910 project.open_local_buffer("/dir/file1.rs", cx)
3911 })
3912 .await
3913 .unwrap();
3914 assert_eq!(opened_buffer, buffer);
3915}
3916
// Exercises a real (non-fake) file system: files and directories are
// renamed and deleted on disk, and the rescan must preserve entry ids,
// retarget open buffers, and replay cleanly onto a remote worktree copy.
// `retries = 5` because real FS event timing can be flaky.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real FS watching blocks on OS events, so parking is allowed.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Returns the worktree entry id for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so it can be replayed
    // onto the remote worktree at the end of the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the new layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });

    // Entry ids are stable across renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers now point at the renamed paths; the deleted file's
        // buffer keeps its old path but reports a deleted disk state.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });
}
4082
4083#[gpui::test(iterations = 10)]
4084async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4085 init_test(cx);
4086
4087 let fs = FakeFs::new(cx.executor());
4088 fs.insert_tree(
4089 path!("/dir"),
4090 json!({
4091 "a": {
4092 "file1": "",
4093 }
4094 }),
4095 )
4096 .await;
4097
4098 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4099 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4100 let tree_id = tree.update(cx, |tree, _| tree.id());
4101
4102 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4103 project.update(cx, |project, cx| {
4104 let tree = project.worktrees(cx).next().unwrap();
4105 tree.read(cx)
4106 .entry_for_path(path)
4107 .unwrap_or_else(|| panic!("no entry for path {}", path))
4108 .id
4109 })
4110 };
4111
4112 let dir_id = id_for_path("a", cx);
4113 let file_id = id_for_path("a/file1", cx);
4114 let buffer = project
4115 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
4116 .await
4117 .unwrap();
4118 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4119
4120 project
4121 .update(cx, |project, cx| {
4122 project.rename_entry(dir_id, Path::new("b"), cx)
4123 })
4124 .unwrap()
4125 .await
4126 .into_included()
4127 .unwrap();
4128 cx.executor().run_until_parked();
4129
4130 assert_eq!(id_for_path("b", cx), dir_id);
4131 assert_eq!(id_for_path("b/file1", cx), file_id);
4132 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4133}
4134
4135#[gpui::test]
4136async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4137 init_test(cx);
4138
4139 let fs = FakeFs::new(cx.executor());
4140 fs.insert_tree(
4141 "/dir",
4142 json!({
4143 "a.txt": "a-contents",
4144 "b.txt": "b-contents",
4145 }),
4146 )
4147 .await;
4148
4149 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4150
4151 // Spawn multiple tasks to open paths, repeating some paths.
4152 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4153 (
4154 p.open_local_buffer("/dir/a.txt", cx),
4155 p.open_local_buffer("/dir/b.txt", cx),
4156 p.open_local_buffer("/dir/a.txt", cx),
4157 )
4158 });
4159
4160 let buffer_a_1 = buffer_a_1.await.unwrap();
4161 let buffer_a_2 = buffer_a_2.await.unwrap();
4162 let buffer_b = buffer_b.await.unwrap();
4163 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4164 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4165
4166 // There is only one buffer per path.
4167 let buffer_a_id = buffer_a_1.entity_id();
4168 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4169
4170 // Open the same path again while it is still open.
4171 drop(buffer_a_1);
4172 let buffer_a_3 = project
4173 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4174 .await
4175 .unwrap();
4176
4177 // There's still only one buffer per path.
4178 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4179}
4180
// Tracks the dirty bit and the exact event sequence a buffer emits across
// edits, saves, revert-by-editing, and on-disk deletion. `Operation`
// events are filtered out; everything else is asserted in order.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Accumulates every non-operation event buffer1 emits.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first post-save edit toggles DirtyChanged; the
        // second edit emits Edited alone.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    // Only the file handle change is reported; the buffer stays dirty.
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
4362
// When a file changes on disk, an unmodified buffer reloads in place
// (preserving anchors via a diff against the new contents), while a
// modified buffer keeps its text and is flagged as conflicted.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The `ˇ` markers identify offsets whose anchors are tracked across the
    // reload below.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Anchors created against the old contents land on the
        // corresponding marked offsets in the new contents.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4445
4446#[gpui::test]
4447async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4448 init_test(cx);
4449
4450 let fs = FakeFs::new(cx.executor());
4451 fs.insert_tree(
4452 path!("/dir"),
4453 json!({
4454 "file1": "a\nb\nc\n",
4455 "file2": "one\r\ntwo\r\nthree\r\n",
4456 }),
4457 )
4458 .await;
4459
4460 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4461 let buffer1 = project
4462 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4463 .await
4464 .unwrap();
4465 let buffer2 = project
4466 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4467 .await
4468 .unwrap();
4469
4470 buffer1.update(cx, |buffer, _| {
4471 assert_eq!(buffer.text(), "a\nb\nc\n");
4472 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4473 });
4474 buffer2.update(cx, |buffer, _| {
4475 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4476 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4477 });
4478
4479 // Change a file's line endings on disk from unix to windows. The buffer's
4480 // state updates correctly.
4481 fs.save(
4482 path!("/dir/file1").as_ref(),
4483 &"aaa\nb\nc\n".into(),
4484 LineEnding::Windows,
4485 )
4486 .await
4487 .unwrap();
4488 cx.executor().run_until_parked();
4489 buffer1.update(cx, |buffer, _| {
4490 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4491 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4492 });
4493
4494 // Save a file with windows line endings. The file is written correctly.
4495 buffer2.update(cx, |buffer, cx| {
4496 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4497 });
4498 project
4499 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4500 .await
4501 .unwrap();
4502 assert_eq!(
4503 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4504 "one\r\ntwo\r\nthree\r\nfour\r\n",
4505 );
4506}
4507
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published with `relatedInformation` links are
    // grouped: each primary diagnostic and its hint entries share a group id,
    // and `diagnostic_group` returns exactly the members of one group.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload containing two logical groups:
    // - "error 1" (WARNING) with one HINT that points back at it
    // - "error 2" (ERROR) with two HINTs that point back at it
    // The primary and hint entries reference each other via
    // `related_information`, which is what drives the grouping below.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the payload into the LSP store as if a server had pushed it.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries come back in buffer order. The "error 2" cluster gets
    // group_id 0 and the "error 1" cluster gets group_id 1; exactly one entry
    // per group is marked as primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: the ERROR "error 2" plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: the WARNING "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
4767
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a worktree entry sends the LSP
    // `workspace/willRenameFiles` request (and applies the server's returned
    // WorkspaceEdit) followed by the `workspace/didRenameFiles` notification,
    // when the server registers file-operation filters for those files.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server declares interest in `.rs` files and in all folders; the
    // rename below matches the first filter.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the resulting future is awaited only after the
    // willRenameFiles handler below has been installed and has fired.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the fake server will return from willRenameFiles.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the server actually handed back, so we can assert on
    // it at the end without racing the async handler.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe the old and new URIs of the
                    // single renamed file.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must also receive the
    // didRenameFiles notification with the same URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4896
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Verifies the symbol-rename flow: `prepare_rename` resolves the editable
    // range via the fake server, and `perform_rename` applies the server's
    // multi-file WorkspaceEdit to the affected buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Advertise prepare-rename support so `prepare_rename` goes through LSP.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Issue the request first; the handler installed below services the
    // already-pending request. Offset 7 is inside "ONE" in `const ONE: ...`.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The LSP range (0,6)-(0,9) maps to buffer offsets 6..9 — the "ONE" token.
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Now perform the rename; the fake server responds with edits touching
    // both one.rs (the definition) and two.rs (two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its undo transaction; both
    // files must reflect the rename.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5036
5037#[gpui::test]
5038async fn test_search(cx: &mut gpui::TestAppContext) {
5039 init_test(cx);
5040
5041 let fs = FakeFs::new(cx.executor());
5042 fs.insert_tree(
5043 path!("/dir"),
5044 json!({
5045 "one.rs": "const ONE: usize = 1;",
5046 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5047 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5048 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5049 }),
5050 )
5051 .await;
5052 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5053 assert_eq!(
5054 search(
5055 &project,
5056 SearchQuery::text(
5057 "TWO",
5058 false,
5059 true,
5060 false,
5061 Default::default(),
5062 Default::default(),
5063 false,
5064 None
5065 )
5066 .unwrap(),
5067 cx
5068 )
5069 .await
5070 .unwrap(),
5071 HashMap::from_iter([
5072 (path!("dir/two.rs").to_string(), vec![6..9]),
5073 (path!("dir/three.rs").to_string(), vec![37..40])
5074 ])
5075 );
5076
5077 let buffer_4 = project
5078 .update(cx, |project, cx| {
5079 project.open_local_buffer(path!("/dir/four.rs"), cx)
5080 })
5081 .await
5082 .unwrap();
5083 buffer_4.update(cx, |buffer, cx| {
5084 let text = "two::TWO";
5085 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5086 });
5087
5088 assert_eq!(
5089 search(
5090 &project,
5091 SearchQuery::text(
5092 "TWO",
5093 false,
5094 true,
5095 false,
5096 Default::default(),
5097 Default::default(),
5098 false,
5099 None,
5100 )
5101 .unwrap(),
5102 cx
5103 )
5104 .await
5105 .unwrap(),
5106 HashMap::from_iter([
5107 (path!("dir/two.rs").to_string(), vec![6..9]),
5108 (path!("dir/three.rs").to_string(), vec![37..40]),
5109 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5110 ])
5111 );
5112}
5113
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies that the inclusion `PathMatcher` restricts search results to
    // matching files, and that non-matching inclusion globs are ignored when
    // combined with matching ones.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion glob that matches no file at all.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Inclusion restricted to Rust files only.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A matching glob combined with one that matches nothing.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching globs together cover all matching files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5233
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Verifies that the exclusion `PathMatcher` removes matching files from
    // search results, and that non-matching exclusion globs are harmless.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Exclusion glob that matches nothing excludes nothing.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Excluding Rust files leaves only the TypeScript matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A matching exclusion combined with one that matches nothing.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding every file type yields an empty result set.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5353
#[gpui::test]
async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
    // Same exclusion scenarios as `test_search_with_exclusions`, but with an
    // extra open buffer that contains the needle and is explicitly marked as
    // non-searchable — it must never appear in any result set.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // This buffer's text is exactly the needle, but it's marked
    // non-searchable, so it should be skipped by every search below.
    let _buffer = project.update(cx, |project, cx| {
        let buffer = project.create_local_buffer("file", None, cx);
        project.mark_buffer_as_non_searchable(buffer.read(cx).remote_id(), cx);
        buffer
    });

    // Exclusion glob that matches nothing excludes nothing (the
    // non-searchable buffer is still absent).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Excluding Rust files leaves only the TypeScript matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A matching exclusion combined with one that matches nothing.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding every file type yields an empty result set.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5479
#[gpui::test]
async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies the interaction of inclusion and exclusion matchers: when both
    // apply to a file, the exclusion wins; only files that are included and
    // not excluded are returned.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Identical inclusion and exclusion globs that match nothing.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both no exclusions and inclusions match, exclusions should win and return nothing"
    );

    // Identical inclusion and exclusion globs that match the same files.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
    );

    // Adding non-matching globs to both sides changes nothing.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Non-matching inclusions and exclusions should not change that."
    );

    // Disjoint sets: include TypeScript, exclude Rust — TypeScript survives.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
    );
}
5590
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies inclusion globs across a project with two worktrees: a glob
    // prefixed with a worktree name restricts results to that worktree, while
    // an unprefixed glob applies to all worktrees.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // Worktree-qualified glob: only worktree-a's Rust file matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same, scoped to the other worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // Unqualified glob: matches the TypeScript file in both worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
5688
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Verifies that project search skips `.gitignore`d files by default, can be
    // asked to include them, and that include/exclude path matchers still apply
    // when searching inside ignored directories.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    // Both `target/` and `node_modules/` are ignored via the root `.gitignore`.
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: the fourth `SearchQuery::text` flag (include-ignored) is
    // false, so only the non-ignored root `package.json` is searched.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project to avoid any state carried over from the previous search.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Same query with include-ignored set to true: every file containing the
    // query is reported, including those under `target/` and `node_modules/`.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include/exclude matchers apply on top of the include-ignored flag: limit
    // to the (ignored) prettier directory, then exclude its TypeScript file.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5811
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    // Verifies searching for non-ASCII text: a case-sensitive query stays a
    // plain text query, while a case-insensitive non-ASCII query is compiled
    // into a regex variant (asserted below), and both return correct byte
    // ranges for multi-byte (Cyrillic) matches.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // Case-sensitive search remains a plain text query.
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    // Only lowercase occurrences match; ranges are byte offsets (each Cyrillic
    // letter is 2 bytes, so "привет" spans 12 bytes).
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // Case-insensitive matching of non-ASCII text falls back to a regex query.
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    // Now both the uppercase and lowercase forms match in every file.
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing '.' is matched literally (only "ПРИВЕТ." in two.rs matches,
    // not "ПРИВЕТ?" or "привет!"), i.e. it is not treated as a regex wildcard
    // despite the regex fallback.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
5895
5896#[gpui::test]
5897async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5898 init_test(cx);
5899
5900 let fs = FakeFs::new(cx.executor().clone());
5901 fs.insert_tree(
5902 "/one/two",
5903 json!({
5904 "three": {
5905 "a.txt": "",
5906 "four": {}
5907 },
5908 "c.rs": ""
5909 }),
5910 )
5911 .await;
5912
5913 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5914 project
5915 .update(cx, |project, cx| {
5916 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5917 project.create_entry((id, "b.."), true, cx)
5918 })
5919 .await
5920 .unwrap()
5921 .into_included()
5922 .unwrap();
5923
5924 // Can't create paths outside the project
5925 let result = project
5926 .update(cx, |project, cx| {
5927 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5928 project.create_entry((id, "../../boop"), true, cx)
5929 })
5930 .await;
5931 assert!(result.is_err());
5932
5933 // Can't create paths with '..'
5934 let result = project
5935 .update(cx, |project, cx| {
5936 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5937 project.create_entry((id, "four/../beep"), true, cx)
5938 })
5939 .await;
5940 assert!(result.is_err());
5941
5942 assert_eq!(
5943 fs.paths(true),
5944 vec![
5945 PathBuf::from(path!("/")),
5946 PathBuf::from(path!("/one")),
5947 PathBuf::from(path!("/one/two")),
5948 PathBuf::from(path!("/one/two/c.rs")),
5949 PathBuf::from(path!("/one/two/three")),
5950 PathBuf::from(path!("/one/two/three/a.txt")),
5951 PathBuf::from(path!("/one/two/three/b..")),
5952 PathBuf::from(path!("/one/two/three/four")),
5953 ]
5954 );
5955
5956 // And we cannot open buffers with '..'
5957 let result = project
5958 .update(cx, |project, cx| {
5959 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5960 project.open_buffer((id, "../c.rs"), cx)
5961 })
5962 .await;
5963 assert!(result.is_err())
5964}
5965
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Opens a buffer served by four fake language servers and verifies that a
    // hover request fans out only to the servers advertising hover support,
    // that a `None` response (ESLint) is dropped from the merged result, and
    // that a server without the capability is never queried at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // The first three servers advertise hover support; the last one does not.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer triggers the startup of all registered servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install per-server hover handlers: two servers respond with content,
    // ESLint responds with `None`, and the server without the hover capability
    // panics if it is ever asked.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Block until every capable server has actually received the hover request
    // before inspecting the merged result.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
6119
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // Verifies that hover content made entirely of empty or whitespace-only
    // strings is filtered out, yielding no hover blocks at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server answers every hover with three "blank" parts: empty, spaces,
    // and newlines only.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Ensure the request actually reached the server before asserting.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
6192
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // Verifies that `Project::code_actions` with an explicit kinds filter
    // returns only the actions whose kind matches, even when the server
    // responds with additional actions of other kinds.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server always responds with two actions of different kinds.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only "organize imports" actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Ensure the request actually reached the server before asserting.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // The SOURCE_FIX_ALL action must have been filtered out.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6271
6272#[gpui::test]
6273async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6274 init_test(cx);
6275
6276 let fs = FakeFs::new(cx.executor());
6277 fs.insert_tree(
6278 path!("/dir"),
6279 json!({
6280 "a.tsx": "a",
6281 }),
6282 )
6283 .await;
6284
6285 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6286
6287 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6288 language_registry.add(tsx_lang());
6289 let language_server_names = [
6290 "TypeScriptServer",
6291 "TailwindServer",
6292 "ESLintServer",
6293 "NoActionsCapabilitiesServer",
6294 ];
6295
6296 let mut language_server_rxs = [
6297 language_registry.register_fake_lsp(
6298 "tsx",
6299 FakeLspAdapter {
6300 name: language_server_names[0],
6301 capabilities: lsp::ServerCapabilities {
6302 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6303 ..lsp::ServerCapabilities::default()
6304 },
6305 ..FakeLspAdapter::default()
6306 },
6307 ),
6308 language_registry.register_fake_lsp(
6309 "tsx",
6310 FakeLspAdapter {
6311 name: language_server_names[1],
6312 capabilities: lsp::ServerCapabilities {
6313 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6314 ..lsp::ServerCapabilities::default()
6315 },
6316 ..FakeLspAdapter::default()
6317 },
6318 ),
6319 language_registry.register_fake_lsp(
6320 "tsx",
6321 FakeLspAdapter {
6322 name: language_server_names[2],
6323 capabilities: lsp::ServerCapabilities {
6324 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6325 ..lsp::ServerCapabilities::default()
6326 },
6327 ..FakeLspAdapter::default()
6328 },
6329 ),
6330 language_registry.register_fake_lsp(
6331 "tsx",
6332 FakeLspAdapter {
6333 name: language_server_names[3],
6334 capabilities: lsp::ServerCapabilities {
6335 code_action_provider: None,
6336 ..lsp::ServerCapabilities::default()
6337 },
6338 ..FakeLspAdapter::default()
6339 },
6340 ),
6341 ];
6342
6343 let (buffer, _handle) = project
6344 .update(cx, |p, cx| {
6345 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6346 })
6347 .await
6348 .unwrap();
6349 cx.executor().run_until_parked();
6350
6351 let mut servers_with_actions_requests = HashMap::default();
6352 for i in 0..language_server_names.len() {
6353 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6354 panic!(
6355 "Failed to get language server #{i} with name {}",
6356 &language_server_names[i]
6357 )
6358 });
6359 let new_server_name = new_server.server.name();
6360
6361 assert!(
6362 !servers_with_actions_requests.contains_key(&new_server_name),
6363 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6364 );
6365 match new_server_name.0.as_ref() {
6366 "TailwindServer" | "TypeScriptServer" => {
6367 servers_with_actions_requests.insert(
6368 new_server_name.clone(),
6369 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6370 move |_, _| {
6371 let name = new_server_name.clone();
6372 async move {
6373 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6374 lsp::CodeAction {
6375 title: format!("{name} code action"),
6376 ..lsp::CodeAction::default()
6377 },
6378 )]))
6379 }
6380 },
6381 ),
6382 );
6383 }
6384 "ESLintServer" => {
6385 servers_with_actions_requests.insert(
6386 new_server_name,
6387 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6388 |_, _| async move { Ok(None) },
6389 ),
6390 );
6391 }
6392 "NoActionsCapabilitiesServer" => {
6393 let _never_handled = new_server
6394 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6395 panic!(
6396 "Should not call for code actions server with no corresponding capabilities"
6397 )
6398 });
6399 }
6400 unexpected => panic!("Unexpected server name: {unexpected}"),
6401 }
6402 }
6403
6404 let code_actions_task = project.update(cx, |project, cx| {
6405 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6406 });
6407
6408 // cx.run_until_parked();
6409 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6410 |mut code_actions_request| async move {
6411 code_actions_request
6412 .next()
6413 .await
6414 .expect("All code actions requests should have been triggered")
6415 },
6416 ))
6417 .await;
6418 assert_eq!(
6419 vec!["TailwindServer code action", "TypeScriptServer code action"],
6420 code_actions_task
6421 .await
6422 .unwrap()
6423 .into_iter()
6424 .map(|code_action| code_action.lsp_action.title().to_owned())
6425 .sorted()
6426 .collect::<Vec<_>>(),
6427 "Should receive code actions responses from all related servers with hover capabilities"
6428 );
6429}
6430
#[gpui::test]
async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
    // Exercises `Project::move_worktree` through every adjacent and
    // non-adjacent move direction (first↔second, second↔third, first↔third),
    // asserting the visible-worktree order after each move. Note that the
    // `worktree_a/b/c` handles refer to fixed worktrees; only their positions
    // in the visible order change.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;",
            "c.rs": "let c = 2;",
        }),
    )
    .await;

    // Each file becomes its own single-file worktree.
    let project = Project::test(
        fs,
        [
            "/dir/a.rs".as_ref(),
            "/dir/b.rs".as_ref(),
            "/dir/c.rs".as_ref(),
        ],
        cx,
    )
    .await;

    // check the initial state and get the worktrees
    let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let worktree_a = worktrees[0].read(cx);
        let worktree_b = worktrees[1].read(cx);
        let worktree_c = worktrees[2].read(cx);

        // check they start in the right order
        assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");

        (
            worktrees[0].clone(),
            worktrees[1].clone(),
            worktrees[2].clone(),
        )
    });

    // move first worktree to after the second
    // [a, b, c] -> [b, a, c]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving first after second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to before the first
    // [b, a, c] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            // NOTE: after the previous move, `worktree_a` occupies the second
            // position and `worktree_b` the first — hence the bindings below.
            let second = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving second before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to after the third
    // [a, b, c] -> [a, c, b]
    project
        .update(cx, |project, cx| {
            let second = worktree_b.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(second.id(), third.id(), cx)
        })
        .expect("moving second after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
    });

    // move the third worktree to before the second
    // [a, c, b] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            // `worktree_b` is currently third, `worktree_c` second.
            let third = worktree_c.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(third.id(), second.id(), cx)
        })
        .expect("moving third before second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the first worktree to after the third
    // [a, b, c] -> [b, c, a]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(first.id(), third.id(), cx)
        })
        .expect("moving first after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
    });

    // move the third worktree to before the first
    // [b, c, a] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            // `worktree_a` is currently third, `worktree_b` first.
            let third = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(third.id(), first.id(), cx)
        })
        .expect("moving third before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });
}
6628
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an unstaged diff compares the buffer contents against the
    // git index (staged) text, and that changing the index recomputes the
    // diff's hunks and base text.
    init_test(cx);

    // Index version: no leading comment, prints "hello world".
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: adds a comment line and changes the message.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Against the initial index, the diff shows an added comment line and a
    // modified println line.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Re-stage a version that already has the comment but lacks the println;
    // the diff base should update accordingly.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    // After the index change, only the println line remains as an added hunk.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6726
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an uncommitted diff compares the buffer against HEAD, that
    // each hunk's secondary status tracks whether the same change is also
    // present relative to the index (i.e. staged or not), and that deleted
    // files produce a single deletion hunk whose secondary status clears once
    // the deletion is staged.
    init_test(cx);

    // HEAD version of modification.rs.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index version: message already changed, comment not yet added.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working-copy version: comment added on top of the staged change.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // deletion.rs exists in HEAD and the index but not on disk — a working-copy
    // deletion that has not been staged yet.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language (Rust).
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        // The added comment is unstaged (HasSecondaryHunk); the println change
        // is already staged, so it has no secondary hunk.
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as one deletion hunk; the deletion is not yet
    // staged, hence HasSecondaryHunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the deletion is staged, the secondary status clears.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6906
// Exercises the full lifecycle of staging diff hunks one at a time:
// optimistic "removal pending" state immediately after staging, settling to
// staged once the index write completes, rollback to unstaged when the index
// write fails, and two independent staging operations landing together.
// Also verifies the `BufferDiffEvent`s emitted at each transition.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index start out identical; the working copy deletes "zero" and
    // modifies "two" and "four", yielding three initially-unstaged hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe before making changes so no events are missed.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    // (`HasSecondaryHunk` marks a hunk as unstaged throughout this test.)
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The staged hunk transitions to `SecondaryHunkRemovalPending`
        // synchronously, before the index write has been observed.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It still shows as pending optimistically, because
    // the failure is only discovered when the write is attempted.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7246
// Verifies that staging remains consistent when FS change notifications for
// index writes arrive late: hunks staged while earlier events are still
// buffered must not be reverted by the stale snapshots those events carry.
// The fixed seeds reproduce previously-problematic schedulings.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as `test_staging_hunks`: one deletion and two
    // modifications relative to HEAD/index.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events so that index writes happen but their FS notifications
    // are buffered until explicitly flushed below.
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7440
// Randomized stress test: toggles staging of random hunks with random yields
// in between, optionally deprioritizing diff recalculation to provoke races
// between diff updates and index writes. After quiescence, the observed
// secondary statuses must match the model maintained in `hunks`.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via the
    // `OPERATIONS` environment variable.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.gen_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines with every fifth line modified in the buffer, producing
    // exactly 6 modification hunks (asserted below).
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the test's model: each operation records the
    // expected pending status on the local copy.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.gen_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Stage if currently unstaged, otherwise unstage; mirror the expected
        // optimistic status in the model.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Interleave random asynchronous delays to shuffle task orderings.
        for _ in 0..rng.gen_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // After quiescence, all pending transitions should have resolved.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7559
// Verifies that uncommitted diffs work when the worktree root is a single
// file (`/dir/src/main.rs`) rather than a directory containing the `.git`
// folder — the repository lives above the worktree root.
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index agree, so the sole hunk is an unstaged modification.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents.clone())],
    );

    // Note: the project root is the file itself, not its directory.
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks(&snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text_string().unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                DiffHunkStatus {
                    kind: DiffHunkStatusKind::Modified,
                    secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
                },
            )],
        );
    });
}
7633
// Verifies `GitStore::repository_and_path_for_project_path`: paths map to
// the *innermost* enclosing repository (a nested dependency repo shadows its
// parent), paths outside any repository map to `None`, and removing a `.git`
// directory invalidates prior mappings.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // Layout: `c.txt` is outside any repo; `dir1` is a repo containing a
    // nested repo at `dir1/deps/dep1`.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (worktree-relative path, expected (repo work dir, repo-relative path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::from(repo_path))
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, Path::new(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer `.git` directory should drop the mapping for files
    // that were only covered by that repository.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, Path::new("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
7723
// Verifies special handling of a git repository rooted at the user's home
// directory: it is ignored when the project only opens a subdirectory of
// home, but recognized when the home directory itself is the worktree root.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "home": {
                ".git": {},
                "project": {
                    "a.txt": "A"
                },
            },
        }),
    )
    .await;
    // Make the fake fs treat `/root/home` as the home directory.
    fs.set_home_dir(Path::new(path!("/root/home")).to_owned());

    // Case 1: project opened on a subdirectory of home — the home repo
    // should NOT claim files in the project.
    let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
        assert!(containing.is_none());
    });

    // Case 2: project opened on the home directory itself — the repo applies.
    let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            Path::new(path!("/root/home"))
        );
    });
}
7783
// End-to-end status tracking against a *real* git repository on disk
// (hence `allow_parking` and `RealFs`): initial status on startup, updates on
// working-copy edits, and status after commits and file deletions.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce the statuses annotated above: delete d.txt, modify a.txt
    // (b.txt was never added, so it is untracked).
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged file; it should now appear as modified.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit everything outstanding, then delete one tracked and one
    // untracked file from the working copy.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7913
// Verifies post-processing of raw git statuses: nested repositories are
// excluded from the parent repo's status list, and a file deleted in the
// index but present in the working copy surfaces a combined
// Deleted(index)/Added(worktree) status. Runs against a real on-disk repo.
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Both `project` and `project/sub` repositories exist; pick the outer one.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
7976
// Verifies status reporting when the opened worktree is a *subfolder* of the
// repository: the repo root is discovered above the worktree, statuses for
// files inside the worktree are visible, and clearing the repo status is
// reflected after a rescan.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the files under the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Only e.txt has a (fake) status; c.txt is clean.
    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT.as_ref(), FileStatus::Untracked)],
    );

    // Open the deep subfolder, not the repo root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            FileStatus::Untracked
        );
    });

    // Clear all statuses; the previously-untracked file should report none.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(repository.status_for_path(&E_TXT.into()), None);
    });
}
8053
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE: `#[cfg(any())]` never matches, so this test is currently compiled out
// entirely; re-enable by removing the attribute once the flakiness is fixed.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create conflicting edits to a.txt on two branches, then cherry-pick
    // one onto the other to produce a CONFLICTED state.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git actually entered a cherry-pick conflict state.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository model should report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick is concluded, the conflict set should be empty.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8136
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies that editing .gitignore re-evaluates ignore state and git status:
    // a file that stops being ignored should pick up its real (staged) status.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index both contain .gitignore and a.xml; b.txt is ignored by "*.txt".
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    // Ignore states should have swapped: a.xml is now ignored, b.txt shows as Added.
    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8204
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a repository's work directory on disk updates
    // `work_directory_abs_path` while preserving the per-file statuses.
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // "a" is committed then modified on disk; "b" is never tracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should follow the rename; statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8285
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of the Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    // End-to-end check that file statuses track a real git repository through
    // modifications, commits, resets, stashes, gitignore edits, and renames.
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        // Committed files have no status entry at all.
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // a.txt's changes were stashed away, so it is clean again.
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules, then commit the new .gitignore.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A brand-new nested file shows up as untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the containing directory; the status should follow the new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8487
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that adding an invisible (single-file) worktree does not cause
    // its containing repository to be added to the project's repository list.
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Only dep1 is opened as a visible worktree, so only its repo should appear.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Open a single file from dir1, creating an invisible worktree inside dir1's repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list must be unchanged: dir1's repo stays invisible.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8549
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies ignore/status bookkeeping across rescans: files ignored by an
    // ancestor .gitignore (outside the repo) vs. the repo's own .gitignore,
    // and newly created files in tracked vs. ignored directories.
    init_test(cx);
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                // Don't exclude anything from scanning, so ignored dirs are visible.
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        // The ancestor .gitignore lives outside the repository, so the file is
        // tracked and not considered ignored within the repo.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files in each category and stage tracked-file2.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // The staged new file shows as Added; the others keep their ignore state.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8685
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Verifies that linked git worktrees (".git" file pointing at
    // .git/worktrees/<name>) and submodules (".git" file pointing at
    // .git/modules/<path>) are each discovered as separate repositories, and
    // that status updates in their git dirs are routed to the right repository.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be found: the main repo, the linked
    // worktree, and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, not the main one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // On-disk "B" differs from the committed "b", so the file is modified.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
8835
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    // Verifies that two worktrees living inside the same git repository are
    // deduplicated into a single repository entry.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Both worktrees are subdirectories of the same repo at /root/project.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository should be reported despite the two worktrees.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
8882
8883async fn search(
8884 project: &Entity<Project>,
8885 query: SearchQuery,
8886 cx: &mut gpui::TestAppContext,
8887) -> Result<HashMap<String, Vec<Range<usize>>>> {
8888 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8889 let mut results = HashMap::default();
8890 while let Ok(search_result) = search_rx.recv().await {
8891 match search_result {
8892 SearchResult::Buffer { buffer, ranges } => {
8893 results.entry(buffer).or_insert(ranges);
8894 }
8895 SearchResult::LimitReached => {}
8896 }
8897 }
8898 Ok(results
8899 .into_iter()
8900 .map(|(buffer, ranges)| {
8901 buffer.update(cx, |buffer, cx| {
8902 let path = buffer
8903 .file()
8904 .unwrap()
8905 .full_path(cx)
8906 .to_string_lossy()
8907 .to_string();
8908 let ranges = ranges
8909 .into_iter()
8910 .map(|range| range.to_offset(buffer))
8911 .collect::<Vec<_>>();
8912 (path, ranges)
8913 })
8914 })
8915 .collect())
8916}
8917
8918pub fn init_test(cx: &mut gpui::TestAppContext) {
8919 zlog::init_test();
8920
8921 cx.update(|cx| {
8922 let settings_store = SettingsStore::test(cx);
8923 cx.set_global(settings_store);
8924 release_channel::init(SemanticVersion::default(), cx);
8925 language::init(cx);
8926 Project::init_settings(cx);
8927 });
8928}
8929
8930fn json_lang() -> Arc<Language> {
8931 Arc::new(Language::new(
8932 LanguageConfig {
8933 name: "JSON".into(),
8934 matcher: LanguageMatcher {
8935 path_suffixes: vec!["json".to_string()],
8936 ..Default::default()
8937 },
8938 ..Default::default()
8939 },
8940 None,
8941 ))
8942}
8943
8944fn js_lang() -> Arc<Language> {
8945 Arc::new(Language::new(
8946 LanguageConfig {
8947 name: "JavaScript".into(),
8948 matcher: LanguageMatcher {
8949 path_suffixes: vec!["js".to_string()],
8950 ..Default::default()
8951 },
8952 ..Default::default()
8953 },
8954 None,
8955 ))
8956}
8957
8958fn rust_lang() -> Arc<Language> {
8959 Arc::new(Language::new(
8960 LanguageConfig {
8961 name: "Rust".into(),
8962 matcher: LanguageMatcher {
8963 path_suffixes: vec!["rs".to_string()],
8964 ..Default::default()
8965 },
8966 ..Default::default()
8967 },
8968 Some(tree_sitter_rust::LANGUAGE.into()),
8969 ))
8970}
8971
8972fn typescript_lang() -> Arc<Language> {
8973 Arc::new(Language::new(
8974 LanguageConfig {
8975 name: "TypeScript".into(),
8976 matcher: LanguageMatcher {
8977 path_suffixes: vec!["ts".to_string()],
8978 ..Default::default()
8979 },
8980 ..Default::default()
8981 },
8982 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8983 ))
8984}
8985
8986fn tsx_lang() -> Arc<Language> {
8987 Arc::new(Language::new(
8988 LanguageConfig {
8989 name: "tsx".into(),
8990 matcher: LanguageMatcher {
8991 path_suffixes: vec!["tsx".to_string()],
8992 ..Default::default()
8993 },
8994 ..Default::default()
8995 },
8996 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8997 ))
8998}
8999
9000fn get_all_tasks(
9001 project: &Entity<Project>,
9002 task_contexts: Arc<TaskContexts>,
9003 cx: &mut App,
9004) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9005 let new_tasks = project.update(cx, |project, cx| {
9006 project.task_store.update(cx, |task_store, cx| {
9007 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9008 this.used_and_current_resolved_tasks(task_contexts, cx)
9009 })
9010 })
9011 });
9012
9013 cx.background_spawn(async move {
9014 let (mut old, new) = new_tasks.await;
9015 old.extend(new);
9016 old
9017 })
9018}
9019
9020#[track_caller]
9021fn assert_entry_git_state(
9022 tree: &Worktree,
9023 repository: &Repository,
9024 path: &str,
9025 index_status: Option<StatusCode>,
9026 is_ignored: bool,
9027) {
9028 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9029 let entry = tree
9030 .entry_for_path(path)
9031 .unwrap_or_else(|| panic!("entry {path} not found"));
9032 let status = repository
9033 .status_for_path(&path.into())
9034 .map(|entry| entry.status);
9035 let expected = index_status.map(|index_status| {
9036 TrackedStatus {
9037 index_status,
9038 worktree_status: StatusCode::Unmodified,
9039 }
9040 .into()
9041 });
9042 assert_eq!(
9043 status, expected,
9044 "expected {path} to have git status: {expected:?}"
9045 );
9046 assert_eq!(
9047 entry.is_ignored, is_ignored,
9048 "expected {path} to have is_ignored: {is_ignored}"
9049 );
9050}
9051
9052#[track_caller]
9053fn git_init(path: &Path) -> git2::Repository {
9054 let mut init_opts = RepositoryInitOptions::new();
9055 init_opts.initial_head("main");
9056 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9057}
9058
9059#[track_caller]
9060fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9061 let path = path.as_ref();
9062 let mut index = repo.index().expect("Failed to get index");
9063 index.add_path(path).expect("Failed to add file");
9064 index.write().expect("Failed to write index");
9065}
9066
9067#[track_caller]
9068fn git_remove_index(path: &Path, repo: &git2::Repository) {
9069 let mut index = repo.index().expect("Failed to get index");
9070 index.remove_path(path).expect("Failed to add file");
9071 index.write().expect("Failed to write index");
9072}
9073
9074#[track_caller]
9075fn git_commit(msg: &'static str, repo: &git2::Repository) {
9076 use git2::Signature;
9077
9078 let signature = Signature::now("test", "test@zed.dev").unwrap();
9079 let oid = repo.index().unwrap().write_tree().unwrap();
9080 let tree = repo.find_tree(oid).unwrap();
9081 if let Ok(head) = repo.head() {
9082 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9083
9084 let parent_commit = parent_obj.as_commit().unwrap();
9085
9086 repo.commit(
9087 Some("HEAD"),
9088 &signature,
9089 &signature,
9090 msg,
9091 &tree,
9092 &[parent_commit],
9093 )
9094 .expect("Failed to commit with parent");
9095 } else {
9096 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9097 .expect("Failed to commit");
9098 }
9099}
9100
// Applies `commit` onto the current HEAD via libgit2's cherry-pick; panics on failure.
// (Compiled out — only used by the disabled test_conflicted_cherry_pick test.)
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9106
9107#[track_caller]
9108fn git_stash(repo: &mut git2::Repository) {
9109 use git2::Signature;
9110
9111 let signature = Signature::now("test", "test@zed.dev").unwrap();
9112 repo.stash_save(&signature, "N/A", None)
9113 .expect("Failed to stash");
9114}
9115
9116#[track_caller]
9117fn git_reset(offset: usize, repo: &git2::Repository) {
9118 let head = repo.head().expect("Couldn't get repo head");
9119 let object = head.peel(git2::ObjectType::Commit).unwrap();
9120 let commit = object.as_commit().unwrap();
9121 let new_head = commit
9122 .parents()
9123 .inspect(|parnet| {
9124 parnet.message();
9125 })
9126 .nth(offset)
9127 .expect("Not enough history");
9128 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9129 .expect("Could not reset");
9130}
9131
// Creates branch `name` pointing at the current HEAD commit.
// (Compiled out — only used by the disabled test_conflicted_cherry_pick test.)
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed: the expect message previously said "Failed to commit", which was
    // misleading for a branch-creation failure.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9142
// Points HEAD at the given ref and syncs the working tree to it.
// (Compiled out — only used by the disabled test_conflicted_cherry_pick test.)
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9149
// Snapshot of every entry's git status, keyed by its repo-relative path.
// (Compiled out — only used by the disabled test_conflicted_cherry_pick test.)
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    let mut map = collections::HashMap::default();
    for entry in statuses.iter() {
        map.insert(entry.path().unwrap().to_string(), entry.status());
    }
    map
}
9159
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two sibling worktrees, so resolution must pick the right one per path.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Top-level file in the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        // Nested file: the returned path is relative to the worktree root.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        // File belonging to the second worktree resolves to that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // A nonexistent file still resolves as long as it's inside a worktree.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}