1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
9 DiffHunkStatusKind, assert_hunks,
10};
11use fs::FakeFs;
12use futures::{StreamExt, future};
13use git::{
14 GitHostingProviderRegistry,
15 repository::RepoPath,
16 status::{StatusCode, TrackedStatus},
17};
18use git2::RepositoryInitOptions;
19use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
20use http_client::Url;
21use language::{
22 Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
23 LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
24 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
25 tree_sitter_rust, tree_sitter_typescript,
26};
27use lsp::{
28 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
29 WillRenameFiles, notification::DidRenameFiles,
30};
31use parking_lot::Mutex;
32use paths::{config_dir, tasks_file};
33use postage::stream::Stream as _;
34use pretty_assertions::{assert_eq, assert_matches};
35use rand::{Rng as _, rngs::StdRng};
36use serde_json::json;
37#[cfg(not(windows))]
38use std::os;
39use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
40use task::{ResolvedTask, TaskContext};
41use unindent::Unindent as _;
42use util::{
43 TryFutureExt as _, assert_set_eq, maybe, path,
44 paths::PathMatcher,
45 test::{TempTree, marked_text_offsets},
46 uri,
47};
48use worktree::WorktreeModelHandle as _;
49
50#[gpui::test]
51async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
52 cx.executor().allow_parking();
53
54 let (tx, mut rx) = futures::channel::mpsc::unbounded();
55 let _thread = std::thread::spawn(move || {
56 #[cfg(not(target_os = "windows"))]
57 std::fs::metadata("/tmp").unwrap();
58 #[cfg(target_os = "windows")]
59 std::fs::metadata("C:/Windows").unwrap();
60 std::thread::sleep(Duration::from_millis(1000));
61 tx.unbounded_send(1).unwrap();
62 });
63 rx.next().await.unwrap();
64}
65
66#[gpui::test]
67async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
68 cx.executor().allow_parking();
69
70 let io_task = smol::unblock(move || {
71 println!("sleeping on thread {:?}", std::thread::current().id());
72 std::thread::sleep(Duration::from_millis(10));
73 1
74 });
75
76 let task = cx.foreground_executor().spawn(async move {
77 io_task.await;
78 });
79
80 task.await;
81}
82
83#[cfg(not(windows))]
84#[gpui::test]
85async fn test_symlinks(cx: &mut gpui::TestAppContext) {
86 init_test(cx);
87 cx.executor().allow_parking();
88
89 let dir = TempTree::new(json!({
90 "root": {
91 "apple": "",
92 "banana": {
93 "carrot": {
94 "date": "",
95 "endive": "",
96 }
97 },
98 "fennel": {
99 "grape": "",
100 }
101 }
102 }));
103
104 let root_link_path = dir.path().join("root_link");
105 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
106 os::unix::fs::symlink(
107 dir.path().join("root/fennel"),
108 dir.path().join("root/finnochio"),
109 )
110 .unwrap();
111
112 let project = Project::test(
113 Arc::new(RealFs::new(None, cx.executor())),
114 [root_link_path.as_ref()],
115 cx,
116 )
117 .await;
118
119 project.update(cx, |project, cx| {
120 let worktree = project.worktrees(cx).next().unwrap();
121 let tree = worktree.read(cx);
122 assert_eq!(tree.file_count(), 5);
123 assert_eq!(
124 tree.inode_for_path("fennel/grape"),
125 tree.inode_for_path("finnochio/grape")
126 );
127 });
128}
129
130#[gpui::test]
131async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
132 init_test(cx);
133
134 let dir = TempTree::new(json!({
135 ".editorconfig": r#"
136 root = true
137 [*.rs]
138 indent_style = tab
139 indent_size = 3
140 end_of_line = lf
141 insert_final_newline = true
142 trim_trailing_whitespace = true
143 [*.js]
144 tab_width = 10
145 "#,
146 ".zed": {
147 "settings.json": r#"{
148 "tab_size": 8,
149 "hard_tabs": false,
150 "ensure_final_newline_on_save": false,
151 "remove_trailing_whitespace_on_save": false,
152 "soft_wrap": "editor_width"
153 }"#,
154 },
155 "a.rs": "fn a() {\n A\n}",
156 "b": {
157 ".editorconfig": r#"
158 [*.rs]
159 indent_size = 2
160 "#,
161 "b.rs": "fn b() {\n B\n}",
162 },
163 "c.js": "def c\n C\nend",
164 "README.json": "tabs are better\n",
165 }));
166
167 let path = dir.path();
168 let fs = FakeFs::new(cx.executor());
169 fs.insert_tree_from_real_fs(path, path).await;
170 let project = Project::test(fs, [path], cx).await;
171
172 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
173 language_registry.add(js_lang());
174 language_registry.add(json_lang());
175 language_registry.add(rust_lang());
176
177 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
178
179 cx.executor().run_until_parked();
180
181 cx.update(|cx| {
182 let tree = worktree.read(cx);
183 let settings_for = |path: &str| {
184 let file_entry = tree.entry_for_path(path).unwrap().clone();
185 let file = File::for_entry(file_entry, worktree.clone());
186 let file_language = project
187 .read(cx)
188 .languages()
189 .language_for_file_path(file.path.as_ref());
190 let file_language = cx
191 .background_executor()
192 .block(file_language)
193 .expect("Failed to get file language");
194 let file = file as _;
195 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
196 };
197
198 let settings_a = settings_for("a.rs");
199 let settings_b = settings_for("b/b.rs");
200 let settings_c = settings_for("c.js");
201 let settings_readme = settings_for("README.json");
202
203 // .editorconfig overrides .zed/settings
204 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
205 assert_eq!(settings_a.hard_tabs, true);
206 assert_eq!(settings_a.ensure_final_newline_on_save, true);
207 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
208
209 // .editorconfig in b/ overrides .editorconfig in root
210 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
211
212 // "indent_size" is not set, so "tab_width" is used
213 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
214
215 // README.md should not be affected by .editorconfig's globe "*.rs"
216 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
217 });
218}
219
220#[gpui::test]
221async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
222 init_test(cx);
223 cx.update(|cx| {
224 GitHostingProviderRegistry::default_global(cx);
225 git_hosting_providers::init(cx);
226 });
227
228 let fs = FakeFs::new(cx.executor());
229 let str_path = path!("/dir");
230 let path = Path::new(str_path);
231
232 fs.insert_tree(
233 path!("/dir"),
234 json!({
235 ".zed": {
236 "settings.json": r#"{
237 "git_hosting_providers": [
238 {
239 "provider": "gitlab",
240 "base_url": "https://google.com",
241 "name": "foo"
242 }
243 ]
244 }"#
245 },
246 }),
247 )
248 .await;
249
250 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
251 let (_worktree, _) =
252 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
253 cx.executor().run_until_parked();
254
255 cx.update(|cx| {
256 let provider = GitHostingProviderRegistry::global(cx);
257 assert!(
258 provider
259 .list_hosting_providers()
260 .into_iter()
261 .any(|provider| provider.name() == "foo")
262 );
263 });
264
265 fs.atomic_write(
266 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
267 "{}".into(),
268 )
269 .await
270 .unwrap();
271
272 cx.run_until_parked();
273
274 cx.update(|cx| {
275 let provider = GitHostingProviderRegistry::global(cx);
276 assert!(
277 !provider
278 .list_hosting_providers()
279 .into_iter()
280 .any(|provider| provider.name() == "foo")
281 );
282 });
283}
284
// Verifies directory-scoped `.zed` configuration: `settings.json` values and
// `tasks.json` entries defined in `b/.zed` apply to files under `b/` while the
// worktree root's `.zed` applies elsewhere, and checks how scheduling a task
// plus adding a global tasks file changes the resolved-task ordering.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the single worktree's default task context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // The `.zed` directory at the worktree root is the topmost local task source.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // `a/a.rs` sees the root `.zed` settings; `b/b.rs` sees the
            // overriding `b/.zed` settings.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    // The id base renders the platform-specific separator.
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root `.zed` task as recently scheduled and register a global
    // tasks file; both should influence subsequent resolved-task ordering.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // Now the recently-scheduled task sorts first, worktree tasks next,
    // and the task from the global tasks file comes last.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
493
494#[gpui::test]
495async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
496 init_test(cx);
497 TaskStore::init(None);
498
499 let fs = FakeFs::new(cx.executor());
500 fs.insert_tree(
501 path!("/dir"),
502 json!({
503 ".zed": {
504 "tasks.json": r#"[{
505 "label": "test worktree root",
506 "command": "echo $ZED_WORKTREE_ROOT"
507 }]"#,
508 },
509 "a": {
510 "a.rs": "fn a() {\n A\n}"
511 },
512 }),
513 )
514 .await;
515
516 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
517 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
518
519 cx.executor().run_until_parked();
520 let worktree_id = cx.update(|cx| {
521 project.update(cx, |project, cx| {
522 project.worktrees(cx).next().unwrap().read(cx).id()
523 })
524 });
525
526 let active_non_worktree_item_tasks = cx
527 .update(|cx| {
528 get_all_tasks(
529 &project,
530 Arc::new(TaskContexts {
531 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
532 active_worktree_context: None,
533 other_worktree_contexts: Vec::new(),
534 lsp_task_sources: HashMap::default(),
535 latest_selection: None,
536 }),
537 cx,
538 )
539 })
540 .await;
541 assert!(
542 active_non_worktree_item_tasks.is_empty(),
543 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
544 );
545
546 let active_worktree_tasks = cx
547 .update(|cx| {
548 get_all_tasks(
549 &project,
550 Arc::new(TaskContexts {
551 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
552 active_worktree_context: Some((worktree_id, {
553 let mut worktree_context = TaskContext::default();
554 worktree_context
555 .task_variables
556 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
557 worktree_context
558 })),
559 other_worktree_contexts: Vec::new(),
560 lsp_task_sources: HashMap::default(),
561 latest_selection: None,
562 }),
563 cx,
564 )
565 })
566 .await;
567 assert_eq!(
568 active_worktree_tasks
569 .into_iter()
570 .map(|(source_kind, task)| {
571 let resolved = task.resolved;
572 (source_kind, resolved.command.unwrap())
573 })
574 .collect::<Vec<_>>(),
575 vec![(
576 TaskSourceKind::Worktree {
577 id: worktree_id,
578 directory_in_worktree: PathBuf::from(path!(".zed")),
579 id_base: if cfg!(windows) {
580 "local worktree tasks from directory \".zed\"".into()
581 } else {
582 "local worktree tasks from directory \".zed\"".into()
583 },
584 },
585 "echo /dir".to_string(),
586 )]
587 );
588}
589
// End-to-end coverage of buffer/language-server lifecycle management:
// server startup on first matching buffer, capability-based buffer
// configuration, routing of change/save/rename notifications to the matching
// servers, diagnostics clearing when a rename changes the language, document
// version resets, server restarts, and close notifications.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON language servers with distinct completion
    // trigger characters and save-notification support.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-extension rename is observed as close-then-reopen on the same server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Attach a diagnostic so we can observe it being cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is not guaranteed, hence the set comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
992
993#[gpui::test]
994async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
995 init_test(cx);
996
997 let fs = FakeFs::new(cx.executor());
998 fs.insert_tree(
999 path!("/the-root"),
1000 json!({
1001 ".gitignore": "target\n",
1002 "Cargo.lock": "",
1003 "src": {
1004 "a.rs": "",
1005 "b.rs": "",
1006 },
1007 "target": {
1008 "x": {
1009 "out": {
1010 "x.rs": ""
1011 }
1012 },
1013 "y": {
1014 "out": {
1015 "y.rs": "",
1016 }
1017 },
1018 "z": {
1019 "out": {
1020 "z.rs": ""
1021 }
1022 }
1023 }
1024 }),
1025 )
1026 .await;
1027 fs.insert_tree(
1028 path!("/the-registry"),
1029 json!({
1030 "dep1": {
1031 "src": {
1032 "dep1.rs": "",
1033 }
1034 },
1035 "dep2": {
1036 "src": {
1037 "dep2.rs": "",
1038 }
1039 },
1040 }),
1041 )
1042 .await;
1043 fs.insert_tree(
1044 path!("/the/stdlib"),
1045 json!({
1046 "LICENSE": "",
1047 "src": {
1048 "string.rs": "",
1049 }
1050 }),
1051 )
1052 .await;
1053
1054 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1055 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1056 (project.languages().clone(), project.lsp_store())
1057 });
1058 language_registry.add(rust_lang());
1059 let mut fake_servers = language_registry.register_fake_lsp(
1060 "Rust",
1061 FakeLspAdapter {
1062 name: "the-language-server",
1063 ..Default::default()
1064 },
1065 );
1066
1067 cx.executor().run_until_parked();
1068
1069 // Start the language server by opening a buffer with a compatible file extension.
1070 project
1071 .update(cx, |project, cx| {
1072 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1073 })
1074 .await
1075 .unwrap();
1076
1077 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1078 project.update(cx, |project, cx| {
1079 let worktree_entity = project.worktrees(cx).next().unwrap();
1080 let worktree = worktree_entity.read(cx);
1081 assert_eq!(
1082 worktree
1083 .snapshot()
1084 .entries(true, 0)
1085 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1086 .collect::<Vec<_>>(),
1087 &[
1088 (Path::new(""), false),
1089 (Path::new(".gitignore"), false),
1090 (Path::new("Cargo.lock"), false),
1091 (Path::new("src"), false),
1092 (Path::new("src/a.rs"), false),
1093 (Path::new("src/b.rs"), false),
1094 (Path::new("target"), true),
1095 ]
1096 );
1097 });
1098
1099 let prev_read_dir_count = fs.read_dir_call_count();
1100
1101 let fake_server = fake_servers.next().await.unwrap();
1102 let (server_id, server_name) = lsp_store.read_with(cx, |lsp_store, _| {
1103 let (id, status) = lsp_store.language_server_statuses().next().unwrap();
1104 (id, LanguageServerName::from(status.name.as_str()))
1105 });
1106
1107 // Simulate jumping to a definition in a dependency outside of the worktree.
1108 let _out_of_worktree_buffer = project
1109 .update(cx, |project, cx| {
1110 project.open_local_buffer_via_lsp(
1111 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1112 server_id,
1113 server_name.clone(),
1114 cx,
1115 )
1116 })
1117 .await
1118 .unwrap();
1119
1120 // Keep track of the FS events reported to the language server.
1121 let file_changes = Arc::new(Mutex::new(Vec::new()));
1122 fake_server
1123 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1124 registrations: vec![lsp::Registration {
1125 id: Default::default(),
1126 method: "workspace/didChangeWatchedFiles".to_string(),
1127 register_options: serde_json::to_value(
1128 lsp::DidChangeWatchedFilesRegistrationOptions {
1129 watchers: vec![
1130 lsp::FileSystemWatcher {
1131 glob_pattern: lsp::GlobPattern::String(
1132 path!("/the-root/Cargo.toml").to_string(),
1133 ),
1134 kind: None,
1135 },
1136 lsp::FileSystemWatcher {
1137 glob_pattern: lsp::GlobPattern::String(
1138 path!("/the-root/src/*.{rs,c}").to_string(),
1139 ),
1140 kind: None,
1141 },
1142 lsp::FileSystemWatcher {
1143 glob_pattern: lsp::GlobPattern::String(
1144 path!("/the-root/target/y/**/*.rs").to_string(),
1145 ),
1146 kind: None,
1147 },
1148 lsp::FileSystemWatcher {
1149 glob_pattern: lsp::GlobPattern::String(
1150 path!("/the/stdlib/src/**/*.rs").to_string(),
1151 ),
1152 kind: None,
1153 },
1154 lsp::FileSystemWatcher {
1155 glob_pattern: lsp::GlobPattern::String(
1156 path!("**/Cargo.lock").to_string(),
1157 ),
1158 kind: None,
1159 },
1160 ],
1161 },
1162 )
1163 .ok(),
1164 }],
1165 })
1166 .await
1167 .into_response()
1168 .unwrap();
1169 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1170 let file_changes = file_changes.clone();
1171 move |params, _| {
1172 let mut file_changes = file_changes.lock();
1173 file_changes.extend(params.changes);
1174 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1175 }
1176 });
1177
1178 cx.executor().run_until_parked();
1179 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1180 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1181
1182 let mut new_watched_paths = fs.watched_paths();
1183 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1184 assert_eq!(
1185 &new_watched_paths,
1186 &[
1187 Path::new(path!("/the-root")),
1188 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1189 Path::new(path!("/the/stdlib/src"))
1190 ]
1191 );
1192
1193 // Now the language server has asked us to watch an ignored directory path,
1194 // so we recursively load it.
1195 project.update(cx, |project, cx| {
1196 let worktree = project.visible_worktrees(cx).next().unwrap();
1197 assert_eq!(
1198 worktree
1199 .read(cx)
1200 .snapshot()
1201 .entries(true, 0)
1202 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1203 .collect::<Vec<_>>(),
1204 &[
1205 (Path::new(""), false),
1206 (Path::new(".gitignore"), false),
1207 (Path::new("Cargo.lock"), false),
1208 (Path::new("src"), false),
1209 (Path::new("src/a.rs"), false),
1210 (Path::new("src/b.rs"), false),
1211 (Path::new("target"), true),
1212 (Path::new("target/x"), true),
1213 (Path::new("target/y"), true),
1214 (Path::new("target/y/out"), true),
1215 (Path::new("target/y/out/y.rs"), true),
1216 (Path::new("target/z"), true),
1217 ]
1218 );
1219 });
1220
1221 // Perform some file system mutations, two of which match the watched patterns,
1222 // and one of which does not.
1223 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1224 .await
1225 .unwrap();
1226 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1227 .await
1228 .unwrap();
1229 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1230 .await
1231 .unwrap();
1232 fs.create_file(
1233 path!("/the-root/target/x/out/x2.rs").as_ref(),
1234 Default::default(),
1235 )
1236 .await
1237 .unwrap();
1238 fs.create_file(
1239 path!("/the-root/target/y/out/y2.rs").as_ref(),
1240 Default::default(),
1241 )
1242 .await
1243 .unwrap();
1244 fs.save(
1245 path!("/the-root/Cargo.lock").as_ref(),
1246 &"".into(),
1247 Default::default(),
1248 )
1249 .await
1250 .unwrap();
1251 fs.save(
1252 path!("/the-stdlib/LICENSE").as_ref(),
1253 &"".into(),
1254 Default::default(),
1255 )
1256 .await
1257 .unwrap();
1258 fs.save(
1259 path!("/the/stdlib/src/string.rs").as_ref(),
1260 &"".into(),
1261 Default::default(),
1262 )
1263 .await
1264 .unwrap();
1265
1266 // The language server receives events for the FS mutations that match its watch patterns.
1267 cx.executor().run_until_parked();
1268 assert_eq!(
1269 &*file_changes.lock(),
1270 &[
1271 lsp::FileEvent {
1272 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1273 typ: lsp::FileChangeType::CHANGED,
1274 },
1275 lsp::FileEvent {
1276 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1277 typ: lsp::FileChangeType::DELETED,
1278 },
1279 lsp::FileEvent {
1280 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1281 typ: lsp::FileChangeType::CREATED,
1282 },
1283 lsp::FileEvent {
1284 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1285 typ: lsp::FileChangeType::CREATED,
1286 },
1287 lsp::FileEvent {
1288 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1289 typ: lsp::FileChangeType::CHANGED,
1290 },
1291 ]
1292 );
1293}
1294
// Two separate single-file worktrees ("/dir/a.rs" and "/dir/b.rs") each receive
// pushed diagnostics addressed by file URI. Verifies that diagnostics are routed
// to the correct buffer and rendered with the right severity in each one.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open the project with two single-file roots rather than one directory root.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Push one ERROR diagnostic for a.rs and one WARNING for b.rs, both from the
    // same (fake) language server, addressed purely by URI.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
    });

    // a.rs: only the `a` identifier (columns 4..5) carries the ERROR severity.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    // b.rs: only the `b` identifier carries the WARNING severity — the two
    // buffers' diagnostics did not bleed into each other.
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1400
1401#[gpui::test]
1402async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1403 init_test(cx);
1404
1405 let fs = FakeFs::new(cx.executor());
1406 fs.insert_tree(
1407 path!("/root"),
1408 json!({
1409 "dir": {
1410 ".git": {
1411 "HEAD": "ref: refs/heads/main",
1412 },
1413 ".gitignore": "b.rs",
1414 "a.rs": "let a = 1;",
1415 "b.rs": "let b = 2;",
1416 },
1417 "other.rs": "let b = c;"
1418 }),
1419 )
1420 .await;
1421
1422 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1423 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1424 let (worktree, _) = project
1425 .update(cx, |project, cx| {
1426 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1427 })
1428 .await
1429 .unwrap();
1430 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1431
1432 let (worktree, _) = project
1433 .update(cx, |project, cx| {
1434 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1435 })
1436 .await
1437 .unwrap();
1438 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1439
1440 let server_id = LanguageServerId(0);
1441 lsp_store.update(cx, |lsp_store, cx| {
1442 lsp_store
1443 .update_diagnostics(
1444 server_id,
1445 lsp::PublishDiagnosticsParams {
1446 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1447 version: None,
1448 diagnostics: vec![lsp::Diagnostic {
1449 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1450 severity: Some(lsp::DiagnosticSeverity::ERROR),
1451 message: "unused variable 'b'".to_string(),
1452 ..Default::default()
1453 }],
1454 },
1455 None,
1456 DiagnosticSourceKind::Pushed,
1457 &[],
1458 cx,
1459 )
1460 .unwrap();
1461 lsp_store
1462 .update_diagnostics(
1463 server_id,
1464 lsp::PublishDiagnosticsParams {
1465 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1466 version: None,
1467 diagnostics: vec![lsp::Diagnostic {
1468 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1469 severity: Some(lsp::DiagnosticSeverity::ERROR),
1470 message: "unknown variable 'c'".to_string(),
1471 ..Default::default()
1472 }],
1473 },
1474 None,
1475 DiagnosticSourceKind::Pushed,
1476 &[],
1477 cx,
1478 )
1479 .unwrap();
1480 });
1481
1482 let main_ignored_buffer = project
1483 .update(cx, |project, cx| {
1484 project.open_buffer((main_worktree_id, "b.rs"), cx)
1485 })
1486 .await
1487 .unwrap();
1488 main_ignored_buffer.update(cx, |buffer, _| {
1489 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1490 assert_eq!(
1491 chunks
1492 .iter()
1493 .map(|(s, d)| (s.as_str(), *d))
1494 .collect::<Vec<_>>(),
1495 &[
1496 ("let ", None),
1497 ("b", Some(DiagnosticSeverity::ERROR)),
1498 (" = 2;", None),
1499 ],
1500 "Gigitnored buffers should still get in-buffer diagnostics",
1501 );
1502 });
1503 let other_buffer = project
1504 .update(cx, |project, cx| {
1505 project.open_buffer((other_worktree_id, ""), cx)
1506 })
1507 .await
1508 .unwrap();
1509 other_buffer.update(cx, |buffer, _| {
1510 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1511 assert_eq!(
1512 chunks
1513 .iter()
1514 .map(|(s, d)| (s.as_str(), *d))
1515 .collect::<Vec<_>>(),
1516 &[
1517 ("let b = ", None),
1518 ("c", Some(DiagnosticSeverity::ERROR)),
1519 (";", None),
1520 ],
1521 "Buffers from hidden projects should still get in-buffer diagnostics"
1522 );
1523 });
1524
1525 project.update(cx, |project, cx| {
1526 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1527 assert_eq!(
1528 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1529 vec![(
1530 ProjectPath {
1531 worktree_id: main_worktree_id,
1532 path: Arc::from(Path::new("b.rs")),
1533 },
1534 server_id,
1535 DiagnosticSummary {
1536 error_count: 1,
1537 warning_count: 0,
1538 }
1539 )]
1540 );
1541 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1542 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1543 });
1544}
1545
// Verifies the event sequence around disk-based diagnostics: starting progress
// under the adapter's `disk_based_diagnostics_progress_token` emits
// `DiskBasedDiagnosticsStarted`, published diagnostics emit `DiagnosticsUpdated`,
// ending the progress emits `DiskBasedDiagnosticsFinished`, and re-publishing an
// identical empty diagnostic set produces no second update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token starts the "diagnosing" state.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish an error for a.rs while the progress is still running.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is attached to the buffer with the expected range.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Second identical (empty) publish: no further event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1682
// Restarting a language server while its disk-based diagnostics are still in
// progress must not leave the project stuck in the "diagnosing" state: the old
// server's unfinished progress token is abandoned, and only the replacement
// server (new id) is tracked until it finishes.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // Old server (id 0) is removed; the replacement comes up as id 1.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1773
// Diagnostics published by a language server are cleared — from both the buffer
// and the project-wide summary — when that server is restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is reflected in the buffer and in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1854
// A server publishing diagnostics with a bogus buffer version (far ahead of
// reality) must not corrupt version tracking: after a restart, the buffer is
// re-opened with the new server at version 0, not the stale bogus value.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The replacement server re-opens the buffer starting from version 0.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1894
// Cancelling language-server work for a buffer sends WorkDoneProgressCancel
// only for progress tokens that were begun as cancellable — the token started
// with `cancellable: Some(false)` must not receive a cancel notification.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // One non-cancellable token…
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // …and one cancellable token.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable progress token is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1959
1960#[gpui::test]
1961async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1962 init_test(cx);
1963
1964 let fs = FakeFs::new(cx.executor());
1965 fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
1966 .await;
1967
1968 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1969 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1970
1971 let mut fake_rust_servers = language_registry.register_fake_lsp(
1972 "Rust",
1973 FakeLspAdapter {
1974 name: "rust-lsp",
1975 ..Default::default()
1976 },
1977 );
1978 let mut fake_js_servers = language_registry.register_fake_lsp(
1979 "JavaScript",
1980 FakeLspAdapter {
1981 name: "js-lsp",
1982 ..Default::default()
1983 },
1984 );
1985 language_registry.add(rust_lang());
1986 language_registry.add(js_lang());
1987
1988 let _rs_buffer = project
1989 .update(cx, |project, cx| {
1990 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1991 })
1992 .await
1993 .unwrap();
1994 let _js_buffer = project
1995 .update(cx, |project, cx| {
1996 project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
1997 })
1998 .await
1999 .unwrap();
2000
2001 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
2002 assert_eq!(
2003 fake_rust_server_1
2004 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2005 .await
2006 .text_document
2007 .uri
2008 .as_str(),
2009 uri!("file:///dir/a.rs")
2010 );
2011
2012 let mut fake_js_server = fake_js_servers.next().await.unwrap();
2013 assert_eq!(
2014 fake_js_server
2015 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2016 .await
2017 .text_document
2018 .uri
2019 .as_str(),
2020 uri!("file:///dir/b.js")
2021 );
2022
2023 // Disable Rust language server, ensuring only that server gets stopped.
2024 cx.update(|cx| {
2025 SettingsStore::update_global(cx, |settings, cx| {
2026 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
2027 settings.languages.0.insert(
2028 "Rust".into(),
2029 LanguageSettingsContent {
2030 enable_language_server: Some(false),
2031 ..Default::default()
2032 },
2033 );
2034 });
2035 })
2036 });
2037 fake_rust_server_1
2038 .receive_notification::<lsp::notification::Exit>()
2039 .await;
2040
2041 // Enable Rust and disable JavaScript language servers, ensuring that the
2042 // former gets started again and that the latter stops.
2043 cx.update(|cx| {
2044 SettingsStore::update_global(cx, |settings, cx| {
2045 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
2046 settings.languages.0.insert(
2047 LanguageName::new("Rust"),
2048 LanguageSettingsContent {
2049 enable_language_server: Some(true),
2050 ..Default::default()
2051 },
2052 );
2053 settings.languages.0.insert(
2054 LanguageName::new("JavaScript"),
2055 LanguageSettingsContent {
2056 enable_language_server: Some(false),
2057 ..Default::default()
2058 },
2059 );
2060 });
2061 })
2062 });
2063 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
2064 assert_eq!(
2065 fake_rust_server_2
2066 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2067 .await
2068 .text_document
2069 .uri
2070 .as_str(),
2071 uri!("file:///dir/a.rs")
2072 );
2073 fake_js_server
2074 .receive_notification::<lsp::notification::Exit>()
2075 .await;
2076}
2077
2078#[gpui::test(iterations = 3)]
2079async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
2080 init_test(cx);
2081
2082 let text = "
2083 fn a() { A }
2084 fn b() { BB }
2085 fn c() { CCC }
2086 "
2087 .unindent();
2088
2089 let fs = FakeFs::new(cx.executor());
2090 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2091
2092 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2093 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2094
2095 language_registry.add(rust_lang());
2096 let mut fake_servers = language_registry.register_fake_lsp(
2097 "Rust",
2098 FakeLspAdapter {
2099 disk_based_diagnostics_sources: vec!["disk".into()],
2100 ..Default::default()
2101 },
2102 );
2103
2104 let buffer = project
2105 .update(cx, |project, cx| {
2106 project.open_local_buffer(path!("/dir/a.rs"), cx)
2107 })
2108 .await
2109 .unwrap();
2110
2111 let _handle = project.update(cx, |project, cx| {
2112 project.register_buffer_with_language_servers(&buffer, cx)
2113 });
2114
2115 let mut fake_server = fake_servers.next().await.unwrap();
2116 let open_notification = fake_server
2117 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2118 .await;
2119
2120 // Edit the buffer, moving the content down
2121 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2122 let change_notification_1 = fake_server
2123 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2124 .await;
2125 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2126
2127 // Report some diagnostics for the initial version of the buffer
2128 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2129 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2130 version: Some(open_notification.text_document.version),
2131 diagnostics: vec![
2132 lsp::Diagnostic {
2133 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2134 severity: Some(DiagnosticSeverity::ERROR),
2135 message: "undefined variable 'A'".to_string(),
2136 source: Some("disk".to_string()),
2137 ..Default::default()
2138 },
2139 lsp::Diagnostic {
2140 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2141 severity: Some(DiagnosticSeverity::ERROR),
2142 message: "undefined variable 'BB'".to_string(),
2143 source: Some("disk".to_string()),
2144 ..Default::default()
2145 },
2146 lsp::Diagnostic {
2147 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2148 severity: Some(DiagnosticSeverity::ERROR),
2149 source: Some("disk".to_string()),
2150 message: "undefined variable 'CCC'".to_string(),
2151 ..Default::default()
2152 },
2153 ],
2154 });
2155
2156 // The diagnostics have moved down since they were created.
2157 cx.executor().run_until_parked();
2158 buffer.update(cx, |buffer, _| {
2159 assert_eq!(
2160 buffer
2161 .snapshot()
2162 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2163 .collect::<Vec<_>>(),
2164 &[
2165 DiagnosticEntry {
2166 range: Point::new(3, 9)..Point::new(3, 11),
2167 diagnostic: Diagnostic {
2168 source: Some("disk".into()),
2169 severity: DiagnosticSeverity::ERROR,
2170 message: "undefined variable 'BB'".to_string(),
2171 is_disk_based: true,
2172 group_id: 1,
2173 is_primary: true,
2174 source_kind: DiagnosticSourceKind::Pushed,
2175 ..Diagnostic::default()
2176 },
2177 },
2178 DiagnosticEntry {
2179 range: Point::new(4, 9)..Point::new(4, 12),
2180 diagnostic: Diagnostic {
2181 source: Some("disk".into()),
2182 severity: DiagnosticSeverity::ERROR,
2183 message: "undefined variable 'CCC'".to_string(),
2184 is_disk_based: true,
2185 group_id: 2,
2186 is_primary: true,
2187 source_kind: DiagnosticSourceKind::Pushed,
2188 ..Diagnostic::default()
2189 }
2190 }
2191 ]
2192 );
2193 assert_eq!(
2194 chunks_with_diagnostics(buffer, 0..buffer.len()),
2195 [
2196 ("\n\nfn a() { ".to_string(), None),
2197 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2198 (" }\nfn b() { ".to_string(), None),
2199 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2200 (" }\nfn c() { ".to_string(), None),
2201 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2202 (" }\n".to_string(), None),
2203 ]
2204 );
2205 assert_eq!(
2206 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2207 [
2208 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2209 (" }\nfn c() { ".to_string(), None),
2210 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2211 ]
2212 );
2213 });
2214
2215 // Ensure overlapping diagnostics are highlighted correctly.
2216 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2217 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2218 version: Some(open_notification.text_document.version),
2219 diagnostics: vec![
2220 lsp::Diagnostic {
2221 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2222 severity: Some(DiagnosticSeverity::ERROR),
2223 message: "undefined variable 'A'".to_string(),
2224 source: Some("disk".to_string()),
2225 ..Default::default()
2226 },
2227 lsp::Diagnostic {
2228 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2229 severity: Some(DiagnosticSeverity::WARNING),
2230 message: "unreachable statement".to_string(),
2231 source: Some("disk".to_string()),
2232 ..Default::default()
2233 },
2234 ],
2235 });
2236
2237 cx.executor().run_until_parked();
2238 buffer.update(cx, |buffer, _| {
2239 assert_eq!(
2240 buffer
2241 .snapshot()
2242 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2243 .collect::<Vec<_>>(),
2244 &[
2245 DiagnosticEntry {
2246 range: Point::new(2, 9)..Point::new(2, 12),
2247 diagnostic: Diagnostic {
2248 source: Some("disk".into()),
2249 severity: DiagnosticSeverity::WARNING,
2250 message: "unreachable statement".to_string(),
2251 is_disk_based: true,
2252 group_id: 4,
2253 is_primary: true,
2254 source_kind: DiagnosticSourceKind::Pushed,
2255 ..Diagnostic::default()
2256 }
2257 },
2258 DiagnosticEntry {
2259 range: Point::new(2, 9)..Point::new(2, 10),
2260 diagnostic: Diagnostic {
2261 source: Some("disk".into()),
2262 severity: DiagnosticSeverity::ERROR,
2263 message: "undefined variable 'A'".to_string(),
2264 is_disk_based: true,
2265 group_id: 3,
2266 is_primary: true,
2267 source_kind: DiagnosticSourceKind::Pushed,
2268 ..Diagnostic::default()
2269 },
2270 }
2271 ]
2272 );
2273 assert_eq!(
2274 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2275 [
2276 ("fn a() { ".to_string(), None),
2277 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2278 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2279 ("\n".to_string(), None),
2280 ]
2281 );
2282 assert_eq!(
2283 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2284 [
2285 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2286 ("\n".to_string(), None),
2287 ]
2288 );
2289 });
2290
2291 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2292 // changes since the last save.
2293 buffer.update(cx, |buffer, cx| {
2294 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2295 buffer.edit(
2296 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2297 None,
2298 cx,
2299 );
2300 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2301 });
2302 let change_notification_2 = fake_server
2303 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2304 .await;
2305 assert!(
2306 change_notification_2.text_document.version > change_notification_1.text_document.version
2307 );
2308
2309 // Handle out-of-order diagnostics
2310 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2311 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2312 version: Some(change_notification_2.text_document.version),
2313 diagnostics: vec![
2314 lsp::Diagnostic {
2315 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2316 severity: Some(DiagnosticSeverity::ERROR),
2317 message: "undefined variable 'BB'".to_string(),
2318 source: Some("disk".to_string()),
2319 ..Default::default()
2320 },
2321 lsp::Diagnostic {
2322 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2323 severity: Some(DiagnosticSeverity::WARNING),
2324 message: "undefined variable 'A'".to_string(),
2325 source: Some("disk".to_string()),
2326 ..Default::default()
2327 },
2328 ],
2329 });
2330
2331 cx.executor().run_until_parked();
2332 buffer.update(cx, |buffer, _| {
2333 assert_eq!(
2334 buffer
2335 .snapshot()
2336 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2337 .collect::<Vec<_>>(),
2338 &[
2339 DiagnosticEntry {
2340 range: Point::new(2, 21)..Point::new(2, 22),
2341 diagnostic: Diagnostic {
2342 source: Some("disk".into()),
2343 severity: DiagnosticSeverity::WARNING,
2344 message: "undefined variable 'A'".to_string(),
2345 is_disk_based: true,
2346 group_id: 6,
2347 is_primary: true,
2348 source_kind: DiagnosticSourceKind::Pushed,
2349 ..Diagnostic::default()
2350 }
2351 },
2352 DiagnosticEntry {
2353 range: Point::new(3, 9)..Point::new(3, 14),
2354 diagnostic: Diagnostic {
2355 source: Some("disk".into()),
2356 severity: DiagnosticSeverity::ERROR,
2357 message: "undefined variable 'BB'".to_string(),
2358 is_disk_based: true,
2359 group_id: 5,
2360 is_primary: true,
2361 source_kind: DiagnosticSourceKind::Pushed,
2362 ..Diagnostic::default()
2363 },
2364 }
2365 ]
2366 );
2367 });
2368}
2369
2370#[gpui::test]
2371async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2372 init_test(cx);
2373
2374 let text = concat!(
2375 "let one = ;\n", //
2376 "let two = \n",
2377 "let three = 3;\n",
2378 );
2379
2380 let fs = FakeFs::new(cx.executor());
2381 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2382
2383 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2384 let buffer = project
2385 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2386 .await
2387 .unwrap();
2388
2389 project.update(cx, |project, cx| {
2390 project.lsp_store.update(cx, |lsp_store, cx| {
2391 lsp_store
2392 .update_diagnostic_entries(
2393 LanguageServerId(0),
2394 PathBuf::from("/dir/a.rs"),
2395 None,
2396 None,
2397 vec![
2398 DiagnosticEntry {
2399 range: Unclipped(PointUtf16::new(0, 10))
2400 ..Unclipped(PointUtf16::new(0, 10)),
2401 diagnostic: Diagnostic {
2402 severity: DiagnosticSeverity::ERROR,
2403 message: "syntax error 1".to_string(),
2404 source_kind: DiagnosticSourceKind::Pushed,
2405 ..Diagnostic::default()
2406 },
2407 },
2408 DiagnosticEntry {
2409 range: Unclipped(PointUtf16::new(1, 10))
2410 ..Unclipped(PointUtf16::new(1, 10)),
2411 diagnostic: Diagnostic {
2412 severity: DiagnosticSeverity::ERROR,
2413 message: "syntax error 2".to_string(),
2414 source_kind: DiagnosticSourceKind::Pushed,
2415 ..Diagnostic::default()
2416 },
2417 },
2418 ],
2419 cx,
2420 )
2421 .unwrap();
2422 })
2423 });
2424
2425 // An empty range is extended forward to include the following character.
2426 // At the end of a line, an empty range is extended backward to include
2427 // the preceding character.
2428 buffer.update(cx, |buffer, _| {
2429 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2430 assert_eq!(
2431 chunks
2432 .iter()
2433 .map(|(s, d)| (s.as_str(), *d))
2434 .collect::<Vec<_>>(),
2435 &[
2436 ("let one = ", None),
2437 (";", Some(DiagnosticSeverity::ERROR)),
2438 ("\nlet two =", None),
2439 (" ", Some(DiagnosticSeverity::ERROR)),
2440 ("\nlet three = 3;\n", None)
2441 ]
2442 );
2443 });
2444}
2445
2446#[gpui::test]
2447async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2448 init_test(cx);
2449
2450 let fs = FakeFs::new(cx.executor());
2451 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2452 .await;
2453
2454 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2455 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2456
2457 lsp_store.update(cx, |lsp_store, cx| {
2458 lsp_store
2459 .update_diagnostic_entries(
2460 LanguageServerId(0),
2461 Path::new("/dir/a.rs").to_owned(),
2462 None,
2463 None,
2464 vec![DiagnosticEntry {
2465 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2466 diagnostic: Diagnostic {
2467 severity: DiagnosticSeverity::ERROR,
2468 is_primary: true,
2469 message: "syntax error a1".to_string(),
2470 source_kind: DiagnosticSourceKind::Pushed,
2471 ..Diagnostic::default()
2472 },
2473 }],
2474 cx,
2475 )
2476 .unwrap();
2477 lsp_store
2478 .update_diagnostic_entries(
2479 LanguageServerId(1),
2480 Path::new("/dir/a.rs").to_owned(),
2481 None,
2482 None,
2483 vec![DiagnosticEntry {
2484 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2485 diagnostic: Diagnostic {
2486 severity: DiagnosticSeverity::ERROR,
2487 is_primary: true,
2488 message: "syntax error b1".to_string(),
2489 source_kind: DiagnosticSourceKind::Pushed,
2490 ..Diagnostic::default()
2491 },
2492 }],
2493 cx,
2494 )
2495 .unwrap();
2496
2497 assert_eq!(
2498 lsp_store.diagnostic_summary(false, cx),
2499 DiagnosticSummary {
2500 error_count: 2,
2501 warning_count: 0,
2502 }
2503 );
2504 });
2505}
2506
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that edits a language server computed against an older document
    // version are rebased onto the buffer's current contents: the buffer is
    // edited after the server last saw it, and the resolved edits must still
    // land where the server intended.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server saw at open time; the edits below are
    // tagged with this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    // All ranges below are expressed in coordinates of the *old* document
    // version; `edits_from_lsp` must translate them through the intervening
    // buffer edits above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the resolved edits must preserve the user's interleaved edits
    // (the comments added above) while realizing the server's intent.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}
2661
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // Verifies that `edits_from_lsp` minimizes a sprawling server-provided diff
    // (replace + reinsert + delete spanning the whole file) down to the two
    // small edits it actually represents.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Convert the resolved anchor ranges to points so they can be compared
        // against plain coordinates.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four server edits above should collapse to exactly these two.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2772
2773#[gpui::test]
2774async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
2775 cx: &mut gpui::TestAppContext,
2776) {
2777 init_test(cx);
2778
2779 let text = "Path()";
2780
2781 let fs = FakeFs::new(cx.executor());
2782 fs.insert_tree(
2783 path!("/dir"),
2784 json!({
2785 "a.rs": text
2786 }),
2787 )
2788 .await;
2789
2790 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2791 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2792 let buffer = project
2793 .update(cx, |project, cx| {
2794 project.open_local_buffer(path!("/dir/a.rs"), cx)
2795 })
2796 .await
2797 .unwrap();
2798
2799 // Simulate the language server sending us a pair of edits at the same location,
2800 // with an insertion following a replacement (which violates the LSP spec).
2801 let edits = lsp_store
2802 .update(cx, |lsp_store, cx| {
2803 lsp_store.as_local_mut().unwrap().edits_from_lsp(
2804 &buffer,
2805 [
2806 lsp::TextEdit {
2807 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
2808 new_text: "Path".into(),
2809 },
2810 lsp::TextEdit {
2811 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2812 new_text: "from path import Path\n\n\n".into(),
2813 },
2814 ],
2815 LanguageServerId(0),
2816 None,
2817 cx,
2818 )
2819 })
2820 .await
2821 .unwrap();
2822
2823 buffer.update(cx, |buffer, cx| {
2824 buffer.edit(edits, None, cx);
2825 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
2826 });
2827}
2828
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    // Verifies that `edits_from_lsp` tolerates malformed server edits: edits
    // arriving out of order, a range whose start comes after its end, and a
    // range extending past the end of the document.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) lies after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extending far past the end of the file (line 99);
                    // expected to be clipped to the document's actual extent.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Convert resolved anchors to points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal pair of
        // edits as in the well-formed adjacent-lines test above.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2935
2936fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2937 buffer: &Buffer,
2938 range: Range<T>,
2939) -> Vec<(String, Option<DiagnosticSeverity>)> {
2940 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2941 for chunk in buffer.snapshot().chunks(range, true) {
2942 if chunks.last().map_or(false, |prev_chunk| {
2943 prev_chunk.1 == chunk.diagnostic_severity
2944 }) {
2945 chunks.last_mut().unwrap().0.push_str(chunk.text);
2946 } else {
2947 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2948 }
2949 }
2950 chunks
2951}
2952
2953#[gpui::test(iterations = 10)]
2954async fn test_definition(cx: &mut gpui::TestAppContext) {
2955 init_test(cx);
2956
2957 let fs = FakeFs::new(cx.executor());
2958 fs.insert_tree(
2959 path!("/dir"),
2960 json!({
2961 "a.rs": "const fn a() { A }",
2962 "b.rs": "const y: i32 = crate::a()",
2963 }),
2964 )
2965 .await;
2966
2967 let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;
2968
2969 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2970 language_registry.add(rust_lang());
2971 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2972
2973 let (buffer, _handle) = project
2974 .update(cx, |project, cx| {
2975 project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
2976 })
2977 .await
2978 .unwrap();
2979
2980 let fake_server = fake_servers.next().await.unwrap();
2981 fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2982 let params = params.text_document_position_params;
2983 assert_eq!(
2984 params.text_document.uri.to_file_path().unwrap(),
2985 Path::new(path!("/dir/b.rs")),
2986 );
2987 assert_eq!(params.position, lsp::Position::new(0, 22));
2988
2989 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2990 lsp::Location::new(
2991 lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2992 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2993 ),
2994 )))
2995 });
2996 let mut definitions = project
2997 .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
2998 .await
2999 .unwrap();
3000
3001 // Assert no new language server started
3002 cx.executor().run_until_parked();
3003 assert!(fake_servers.try_next().is_err());
3004
3005 assert_eq!(definitions.len(), 1);
3006 let definition = definitions.pop().unwrap();
3007 cx.update(|cx| {
3008 let target_buffer = definition.target.buffer.read(cx);
3009 assert_eq!(
3010 target_buffer
3011 .file()
3012 .unwrap()
3013 .as_local()
3014 .unwrap()
3015 .abs_path(cx),
3016 Path::new(path!("/dir/a.rs")),
3017 );
3018 assert_eq!(
3019 definition.target.range.to_offset(&target_buffer.snapshot()),
3020 9..10
3021 );
3022 assert_eq!(
3023 list_worktrees(&project, cx),
3024 [
3025 (path!("/dir/a.rs").as_ref(), false),
3026 (path!("/dir/b.rs").as_ref(), true)
3027 ],
3028 );
3029
3030 drop(target_buffer);
3031 drop(definition);
3032 });
3033 cx.update(|cx| {
3034 assert_eq!(
3035 list_worktrees(&project, cx),
3036 [(path!("/dir/b.rs").as_ref(), true)]
3037 );
3038 });
3039
3040 // fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
3041 // project
3042 // .read(cx)
3043 // .worktrees(cx)
3044 // .map(|worktree| {
3045 // let worktree = worktree.read(cx);
3046 // (
3047 // worktree.as_local().unwrap().abs_path().as_ref(),
3048 // worktree.is_visible(),
3049 // )
3050 // })
3051 fn list_worktrees<'a>(_project: &'a Entity<Project>, _cx: &'a App) -> Vec<(&'a Path, bool)> {
3052 todo!("list_worktrees needs to be refactored to handle Ref type")
3053 }
3054}
3055
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    // Verifies that a completion item's `text_edit` field wins over both
    // `insert_text` and `label` when computing the applied text and range.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request before installing the server-side
    // handler; the future resolves once the fake server has replied below.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item carries all three of label / insert_text / text_edit; the
    // range covers the trailing "fqn" (the last 3 characters).
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3138
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies completion resolution when items omit `text_edit` but the
    // completion list supplies a default `edit_range` (LSP `itemDefaults`):
    // the new text falls back to `insert_text`, then to `label`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        // The default edit_range covers the trailing "fqn"; the item itself
        // has no text_edit, so insert_text should be used as the new text.
        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        // Neither text_edit nor insert_text is present, so the label itself
        // should become the inserted text.
        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3274
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies completion behavior when the server provides neither a
    // per-item `text_edit` nor a default `edit_range`: the replace range must
    // be inferred from the text around the cursor (the word being completed).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the partial word "fqn" before the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Note the cursor sits just before the closing quote (text.len() - 1).
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp" inside the string literal, excluding
    // the closing quote.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3380
3381#[gpui::test]
3382async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
3383 init_test(cx);
3384
3385 let fs = FakeFs::new(cx.executor());
3386 fs.insert_tree(
3387 path!("/dir"),
3388 json!({
3389 "a.ts": "",
3390 }),
3391 )
3392 .await;
3393
3394 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3395
3396 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3397 language_registry.add(typescript_lang());
3398 let mut fake_language_servers = language_registry.register_fake_lsp(
3399 "TypeScript",
3400 FakeLspAdapter {
3401 capabilities: lsp::ServerCapabilities {
3402 completion_provider: Some(lsp::CompletionOptions {
3403 trigger_characters: Some(vec![":".to_string()]),
3404 ..Default::default()
3405 }),
3406 ..Default::default()
3407 },
3408 ..Default::default()
3409 },
3410 );
3411
3412 let (buffer, _handle) = project
3413 .update(cx, |p, cx| {
3414 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3415 })
3416 .await
3417 .unwrap();
3418
3419 let fake_server = fake_language_servers.next().await.unwrap();
3420
3421 let text = "let a = b.fqn";
3422 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3423 let completions = project.update(cx, |project, cx| {
3424 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3425 });
3426
3427 fake_server
3428 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3429 Ok(Some(lsp::CompletionResponse::Array(vec![
3430 lsp::CompletionItem {
3431 label: "fullyQualifiedName?".into(),
3432 insert_text: Some("fully\rQualified\r\nName".into()),
3433 ..Default::default()
3434 },
3435 ])))
3436 })
3437 .next()
3438 .await;
3439 let completions = completions
3440 .await
3441 .unwrap()
3442 .into_iter()
3443 .flat_map(|response| response.completions)
3444 .collect::<Vec<_>>();
3445 assert_eq!(completions.len(), 1);
3446 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
3447}
3448
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // Exercises the full round-trip for a code action that carries a command
    // instead of edits: request actions, resolve the chosen action, execute
    // its command, and capture the `workspace/applyEdit` the server sends back
    // while the command runs, so the edit lands in the project transaction.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript server advertising lazily-resolved code
    // actions (`resolve_provider: true`) and one executable command.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`, which triggers resolve).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: insert "X" at the start of the
                    // file, then report command success with a null result.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single step.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3590
3591#[gpui::test(iterations = 10)]
3592async fn test_save_file(cx: &mut gpui::TestAppContext) {
3593 init_test(cx);
3594
3595 let fs = FakeFs::new(cx.executor());
3596 fs.insert_tree(
3597 path!("/dir"),
3598 json!({
3599 "file1": "the old contents",
3600 }),
3601 )
3602 .await;
3603
3604 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3605 let buffer = project
3606 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3607 .await
3608 .unwrap();
3609 buffer.update(cx, |buffer, cx| {
3610 assert_eq!(buffer.text(), "the old contents");
3611 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3612 });
3613
3614 project
3615 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3616 .await
3617 .unwrap();
3618
3619 let new_text = fs
3620 .load(Path::new(path!("/dir/file1")))
3621 .await
3622 .unwrap()
3623 .replace("\r\n", "\n");
3624 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3625}
3626
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    // Saving an untitled buffer under a path with a recognized extension
    // should start the matching language server and open the file in it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register a fake Rust language server; it will only be spawned once a
    // Rust file actually exists in the project.
    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an in-memory buffer with no path; no language server is
    // associated with it yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer under a Rust file name inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: Arc::from("file.rs".as_ref()),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the newly-spawned server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
3706
3707#[gpui::test(iterations = 30)]
3708async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3709 init_test(cx);
3710
3711 let fs = FakeFs::new(cx.executor().clone());
3712 fs.insert_tree(
3713 path!("/dir"),
3714 json!({
3715 "file1": "the original contents",
3716 }),
3717 )
3718 .await;
3719
3720 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3721 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3722 let buffer = project
3723 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3724 .await
3725 .unwrap();
3726
3727 // Simulate buffer diffs being slow, so that they don't complete before
3728 // the next file change occurs.
3729 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3730
3731 // Change the buffer's file on disk, and then wait for the file change
3732 // to be detected by the worktree, so that the buffer starts reloading.
3733 fs.save(
3734 path!("/dir/file1").as_ref(),
3735 &"the first contents".into(),
3736 Default::default(),
3737 )
3738 .await
3739 .unwrap();
3740 worktree.next_event(cx).await;
3741
3742 // Change the buffer's file again. Depending on the random seed, the
3743 // previous file change may still be in progress.
3744 fs.save(
3745 path!("/dir/file1").as_ref(),
3746 &"the second contents".into(),
3747 Default::default(),
3748 )
3749 .await
3750 .unwrap();
3751 worktree.next_event(cx).await;
3752
3753 cx.executor().run_until_parked();
3754 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3755 buffer.read_with(cx, |buffer, _| {
3756 assert_eq!(buffer.text(), on_disk_text);
3757 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3758 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3759 });
3760}
3761
3762#[gpui::test(iterations = 30)]
3763async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3764 init_test(cx);
3765
3766 let fs = FakeFs::new(cx.executor().clone());
3767 fs.insert_tree(
3768 path!("/dir"),
3769 json!({
3770 "file1": "the original contents",
3771 }),
3772 )
3773 .await;
3774
3775 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3776 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3777 let buffer = project
3778 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3779 .await
3780 .unwrap();
3781
3782 // Simulate buffer diffs being slow, so that they don't complete before
3783 // the next file change occurs.
3784 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3785
3786 // Change the buffer's file on disk, and then wait for the file change
3787 // to be detected by the worktree, so that the buffer starts reloading.
3788 fs.save(
3789 path!("/dir/file1").as_ref(),
3790 &"the first contents".into(),
3791 Default::default(),
3792 )
3793 .await
3794 .unwrap();
3795 worktree.next_event(cx).await;
3796
3797 cx.executor()
3798 .spawn(cx.executor().simulate_random_delay())
3799 .await;
3800
3801 // Perform a noop edit, causing the buffer's version to increase.
3802 buffer.update(cx, |buffer, cx| {
3803 buffer.edit([(0..0, " ")], None, cx);
3804 buffer.undo(cx);
3805 });
3806
3807 cx.executor().run_until_parked();
3808 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3809 buffer.read_with(cx, |buffer, _| {
3810 let buffer_text = buffer.text();
3811 if buffer_text == on_disk_text {
3812 assert!(
3813 !buffer.is_dirty() && !buffer.has_conflict(),
3814 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3815 );
3816 }
3817 // If the file change occurred while the buffer was processing the first
3818 // change, the buffer will be in a conflicting state.
3819 else {
3820 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3821 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3822 }
3823 });
3824}
3825
3826#[gpui::test]
3827async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3828 init_test(cx);
3829
3830 let fs = FakeFs::new(cx.executor());
3831 fs.insert_tree(
3832 path!("/dir"),
3833 json!({
3834 "file1": "the old contents",
3835 }),
3836 )
3837 .await;
3838
3839 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3840 let buffer = project
3841 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3842 .await
3843 .unwrap();
3844 buffer.update(cx, |buffer, cx| {
3845 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3846 });
3847
3848 project
3849 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3850 .await
3851 .unwrap();
3852
3853 let new_text = fs
3854 .load(Path::new(path!("/dir/file1")))
3855 .await
3856 .unwrap()
3857 .replace("\r\n", "\n");
3858 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3859}
3860
3861#[gpui::test]
3862async fn test_save_as(cx: &mut gpui::TestAppContext) {
3863 init_test(cx);
3864
3865 let fs = FakeFs::new(cx.executor());
3866 fs.insert_tree("/dir", json!({})).await;
3867
3868 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3869
3870 let languages = project.update(cx, |project, _| project.languages().clone());
3871 languages.add(rust_lang());
3872
3873 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3874 buffer.update(cx, |buffer, cx| {
3875 buffer.edit([(0..0, "abc")], None, cx);
3876 assert!(buffer.is_dirty());
3877 assert!(!buffer.has_conflict());
3878 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3879 });
3880 project
3881 .update(cx, |project, cx| {
3882 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3883 let path = ProjectPath {
3884 worktree_id,
3885 path: Arc::from(Path::new("file1.rs")),
3886 };
3887 project.save_buffer_as(buffer.clone(), path, cx)
3888 })
3889 .await
3890 .unwrap();
3891 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3892
3893 cx.executor().run_until_parked();
3894 buffer.update(cx, |buffer, cx| {
3895 assert_eq!(
3896 buffer.file().unwrap().full_path(cx),
3897 Path::new("dir/file1.rs")
3898 );
3899 assert!(!buffer.is_dirty());
3900 assert!(!buffer.has_conflict());
3901 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3902 });
3903
3904 let opened_buffer = project
3905 .update(cx, |project, cx| {
3906 project.open_local_buffer("/dir/file1.rs", cx)
3907 })
3908 .await
3909 .unwrap();
3910 assert_eq!(opened_buffer, buffer);
3911}
3912
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Renames and deletions on the real filesystem must preserve worktree
    // entry ids and open-buffer identity, and the resulting update stream
    // must bring a remote replica of the worktree into the same state.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    // Real (temp-dir) filesystem, since we exercise actual FS events below.
    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Resolve a path to its stable worktree entry id, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits so we can replay them
    // into the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree now reflects the new on-disk layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });

    // Entry ids are stable across renames (including renames of ancestors).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but reports a Deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });
}
4078
4079#[gpui::test(iterations = 10)]
4080async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4081 init_test(cx);
4082
4083 let fs = FakeFs::new(cx.executor());
4084 fs.insert_tree(
4085 path!("/dir"),
4086 json!({
4087 "a": {
4088 "file1": "",
4089 }
4090 }),
4091 )
4092 .await;
4093
4094 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4095 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4096 let tree_id = tree.update(cx, |tree, _| tree.id());
4097
4098 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4099 project.update(cx, |project, cx| {
4100 let tree = project.worktrees(cx).next().unwrap();
4101 tree.read(cx)
4102 .entry_for_path(path)
4103 .unwrap_or_else(|| panic!("no entry for path {}", path))
4104 .id
4105 })
4106 };
4107
4108 let dir_id = id_for_path("a", cx);
4109 let file_id = id_for_path("a/file1", cx);
4110 let buffer = project
4111 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
4112 .await
4113 .unwrap();
4114 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4115
4116 project
4117 .update(cx, |project, cx| {
4118 project.rename_entry(dir_id, Path::new("b"), cx)
4119 })
4120 .unwrap()
4121 .await
4122 .to_included()
4123 .unwrap();
4124 cx.executor().run_until_parked();
4125
4126 assert_eq!(id_for_path("b", cx), dir_id);
4127 assert_eq!(id_for_path("b/file1", cx), file_id);
4128 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4129}
4130
4131#[gpui::test]
4132async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4133 init_test(cx);
4134
4135 let fs = FakeFs::new(cx.executor());
4136 fs.insert_tree(
4137 "/dir",
4138 json!({
4139 "a.txt": "a-contents",
4140 "b.txt": "b-contents",
4141 }),
4142 )
4143 .await;
4144
4145 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4146
4147 // Spawn multiple tasks to open paths, repeating some paths.
4148 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4149 (
4150 p.open_local_buffer("/dir/a.txt", cx),
4151 p.open_local_buffer("/dir/b.txt", cx),
4152 p.open_local_buffer("/dir/a.txt", cx),
4153 )
4154 });
4155
4156 let buffer_a_1 = buffer_a_1.await.unwrap();
4157 let buffer_a_2 = buffer_a_2.await.unwrap();
4158 let buffer_b = buffer_b.await.unwrap();
4159 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4160 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4161
4162 // There is only one buffer per path.
4163 let buffer_a_id = buffer_a_1.entity_id();
4164 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4165
4166 // Open the same path again while it is still open.
4167 drop(buffer_a_1);
4168 let buffer_a_3 = project
4169 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4170 .await
4171 .unwrap();
4172
4173 // There's still only one buffer per path.
4174 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4175}
4176
4177#[gpui::test]
4178async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4179 init_test(cx);
4180
4181 let fs = FakeFs::new(cx.executor());
4182 fs.insert_tree(
4183 path!("/dir"),
4184 json!({
4185 "file1": "abc",
4186 "file2": "def",
4187 "file3": "ghi",
4188 }),
4189 )
4190 .await;
4191
4192 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4193
4194 let buffer1 = project
4195 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4196 .await
4197 .unwrap();
4198 let events = Arc::new(Mutex::new(Vec::new()));
4199
4200 // initially, the buffer isn't dirty.
4201 buffer1.update(cx, |buffer, cx| {
4202 cx.subscribe(&buffer1, {
4203 let events = events.clone();
4204 move |_, _, event, _| match event {
4205 BufferEvent::Operation { .. } => {}
4206 _ => events.lock().push(event.clone()),
4207 }
4208 })
4209 .detach();
4210
4211 assert!(!buffer.is_dirty());
4212 assert!(events.lock().is_empty());
4213
4214 buffer.edit([(1..2, "")], None, cx);
4215 });
4216
4217 // after the first edit, the buffer is dirty, and emits a dirtied event.
4218 buffer1.update(cx, |buffer, cx| {
4219 assert!(buffer.text() == "ac");
4220 assert!(buffer.is_dirty());
4221 assert_eq!(
4222 *events.lock(),
4223 &[
4224 language::BufferEvent::Edited,
4225 language::BufferEvent::DirtyChanged
4226 ]
4227 );
4228 events.lock().clear();
4229 buffer.did_save(
4230 buffer.version(),
4231 buffer.file().unwrap().disk_state().mtime(),
4232 cx,
4233 );
4234 });
4235
4236 // after saving, the buffer is not dirty, and emits a saved event.
4237 buffer1.update(cx, |buffer, cx| {
4238 assert!(!buffer.is_dirty());
4239 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4240 events.lock().clear();
4241
4242 buffer.edit([(1..1, "B")], None, cx);
4243 buffer.edit([(2..2, "D")], None, cx);
4244 });
4245
4246 // after editing again, the buffer is dirty, and emits another dirty event.
4247 buffer1.update(cx, |buffer, cx| {
4248 assert!(buffer.text() == "aBDc");
4249 assert!(buffer.is_dirty());
4250 assert_eq!(
4251 *events.lock(),
4252 &[
4253 language::BufferEvent::Edited,
4254 language::BufferEvent::DirtyChanged,
4255 language::BufferEvent::Edited,
4256 ],
4257 );
4258 events.lock().clear();
4259
4260 // After restoring the buffer to its previously-saved state,
4261 // the buffer is not considered dirty anymore.
4262 buffer.edit([(1..3, "")], None, cx);
4263 assert!(buffer.text() == "ac");
4264 assert!(!buffer.is_dirty());
4265 });
4266
4267 assert_eq!(
4268 *events.lock(),
4269 &[
4270 language::BufferEvent::Edited,
4271 language::BufferEvent::DirtyChanged
4272 ]
4273 );
4274
4275 // When a file is deleted, it is not considered dirty.
4276 let events = Arc::new(Mutex::new(Vec::new()));
4277 let buffer2 = project
4278 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4279 .await
4280 .unwrap();
4281 buffer2.update(cx, |_, cx| {
4282 cx.subscribe(&buffer2, {
4283 let events = events.clone();
4284 move |_, _, event, _| match event {
4285 BufferEvent::Operation { .. } => {}
4286 _ => events.lock().push(event.clone()),
4287 }
4288 })
4289 .detach();
4290 });
4291
4292 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4293 .await
4294 .unwrap();
4295 cx.executor().run_until_parked();
4296 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4297 assert_eq!(
4298 mem::take(&mut *events.lock()),
4299 &[language::BufferEvent::FileHandleChanged]
4300 );
4301
4302 // Buffer becomes dirty when edited.
4303 buffer2.update(cx, |buffer, cx| {
4304 buffer.edit([(2..3, "")], None, cx);
4305 assert_eq!(buffer.is_dirty(), true);
4306 });
4307 assert_eq!(
4308 mem::take(&mut *events.lock()),
4309 &[
4310 language::BufferEvent::Edited,
4311 language::BufferEvent::DirtyChanged
4312 ]
4313 );
4314
4315 // Buffer becomes clean again when all of its content is removed, because
4316 // the file was deleted.
4317 buffer2.update(cx, |buffer, cx| {
4318 buffer.edit([(0..2, "")], None, cx);
4319 assert_eq!(buffer.is_empty(), true);
4320 assert_eq!(buffer.is_dirty(), false);
4321 });
4322 assert_eq!(
4323 *events.lock(),
4324 &[
4325 language::BufferEvent::Edited,
4326 language::BufferEvent::DirtyChanged
4327 ]
4328 );
4329
4330 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4331 let events = Arc::new(Mutex::new(Vec::new()));
4332 let buffer3 = project
4333 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4334 .await
4335 .unwrap();
4336 buffer3.update(cx, |_, cx| {
4337 cx.subscribe(&buffer3, {
4338 let events = events.clone();
4339 move |_, _, event, _| match event {
4340 BufferEvent::Operation { .. } => {}
4341 _ => events.lock().push(event.clone()),
4342 }
4343 })
4344 .detach();
4345 });
4346
4347 buffer3.update(cx, |buffer, cx| {
4348 buffer.edit([(0..0, "x")], None, cx);
4349 });
4350 events.lock().clear();
4351 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4352 .await
4353 .unwrap();
4354 cx.executor().run_until_parked();
4355 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4356 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4357}
4358
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // When a clean buffer's file changes on disk, the buffer reloads via a
    // diff (preserving anchors); when a dirty buffer's file changes on disk,
    // the buffer keeps its contents and enters a conflict state instead.
    init_test(cx);

    // The ˇ markers record offsets at which anchors will be created below.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // The markers in the new contents are the offsets the anchors are
    // expected to land on after the reload's diff is applied.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4441
4442#[gpui::test]
4443async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4444 init_test(cx);
4445
4446 let fs = FakeFs::new(cx.executor());
4447 fs.insert_tree(
4448 path!("/dir"),
4449 json!({
4450 "file1": "a\nb\nc\n",
4451 "file2": "one\r\ntwo\r\nthree\r\n",
4452 }),
4453 )
4454 .await;
4455
4456 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4457 let buffer1 = project
4458 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4459 .await
4460 .unwrap();
4461 let buffer2 = project
4462 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4463 .await
4464 .unwrap();
4465
4466 buffer1.update(cx, |buffer, _| {
4467 assert_eq!(buffer.text(), "a\nb\nc\n");
4468 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4469 });
4470 buffer2.update(cx, |buffer, _| {
4471 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4472 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4473 });
4474
4475 // Change a file's line endings on disk from unix to windows. The buffer's
4476 // state updates correctly.
4477 fs.save(
4478 path!("/dir/file1").as_ref(),
4479 &"aaa\nb\nc\n".into(),
4480 LineEnding::Windows,
4481 )
4482 .await
4483 .unwrap();
4484 cx.executor().run_until_parked();
4485 buffer1.update(cx, |buffer, _| {
4486 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4487 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4488 });
4489
4490 // Save a file with windows line endings. The file is written correctly.
4491 buffer2.update(cx, |buffer, cx| {
4492 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4493 });
4494 project
4495 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4496 .await
4497 .unwrap();
4498 assert_eq!(
4499 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4500 "one\r\ntwo\r\nthree\r\nfour\r\n",
4501 );
4502}
4503
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics linked through `related_information`
    // are grouped: supplemental hint entries share a `group_id` with their
    // primary diagnostic, and `diagnostic_group` returns a whole group at once.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    // Five raw LSP diagnostics forming two groups:
    // - "error 1" (warning) plus one hint pointing back at it;
    // - "error 2" (error) plus two hints at a different location.
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary diagnostic of the "error 1" group.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Supplemental hint for "error 1"; its related information points
            // back at the original diagnostic.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary diagnostic of the "error 2" group, with two related hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First supplemental hint for "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second supplemental hint for "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Push the diagnostics as if they came from language server 0.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, ordered by position. Hints carry the same `group_id` as
    // their primary and are marked `is_primary: false`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 is the "error 2" group: two hints plus the primary error.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 is the "error 1" group: the primary warning plus its hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
4763
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a project entry sends the LSP
    // `workspace/willRenameFiles` request (applying the workspace edit the
    // server returns) and, after the rename, the `workspace/didRenameFiles`
    // notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server registers interest in renames of Rust files and of any
    // folder, for both `willRename` and `didRename`.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename of `one.rs` -> `three.rs`; the returned future is
    // awaited only after the `willRenameFiles` handler below is installed.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree.read(cx).entry_for_path("one.rs").unwrap().id;
        project.rename_entry(entry_id, "three.rs".as_ref(), cx)
    });
    // Workspace edit the server will answer `willRenameFiles` with.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the handler actually returned, so we can assert at the
    // end that the request was served exactly once.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe the single pending rename.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive `didRenameFiles`
    // describing the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    // The `willRenameFiles` handler ran and returned the expected edit.
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4892
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises the symbol-rename flow: `prepare_rename` resolves the editable
    // range via `textDocument/prepareRename`, then `perform_rename` applies the
    // multi-file workspace edit returned by `textDocument/rename`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // Advertise prepareRename support so the project issues the
                // prepare request before the rename itself.
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside `ONE`); the fake server reports the
    // symbol's range as columns 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename to "THREE"; the server answers with edits touching
    // both `one.rs` (the definition) and `two.rs` (two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both buffers, with all edits applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5032
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Project-wide text search returns a map from worktree-relative path to
    // the offset ranges of each match, and reflects unsaved buffer edits.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive search for "TWO" hits the definition and one reference.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so it now references two::TWO.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The search picks up the unsaved in-memory edits to four.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5109
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Exercises the `files_to_include` PathMatcher argument of
    // `SearchQuery::text`: only files matching at least one inclusion glob are
    // searched; non-matching globs are harmless.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion glob that matches nothing -> empty result set.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5229
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Exercises the `files_to_exclude` PathMatcher argument of
    // `SearchQuery::text`: files matching any exclusion glob are skipped;
    // non-matching globs exclude nothing.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding every extension present leaves nothing to search.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5349
#[gpui::test]
async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
    // Same exclusion scenarios as `test_search_with_exclusions`, but with an
    // extra in-memory buffer whose text ("file") matches the query and which is
    // marked non-searchable — it must never appear in any result set.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Untitled buffer containing the query text, explicitly excluded from search.
    let _buffer = project.update(cx, |project, cx| {
        let buffer = project.create_local_buffer("file", None, cx);
        project.mark_buffer_as_non_searchable(buffer.read(cx).remote_id(), cx);
        buffer
    });

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // With every on-disk file excluded, the non-searchable buffer still must
    // not surface — the result is empty.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5475
#[gpui::test]
async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
    // When a file matches both an inclusion and an exclusion glob, the
    // exclusion wins; inclusions and exclusions over disjoint sets compose.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both no exclusions and inclusions match, exclusions should win and return nothing"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Non-matching inclusions and exclusions should not change that."
    );

    // Disjoint globs: include TypeScript, exclude Rust -> TypeScript results.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
    );
}
5586
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // With multiple worktrees, inclusion globs can be scoped to a single
    // worktree by prefixing the worktree name (when the flag after the
    // matchers is `true` — presumably "match full paths"; confirm against
    // `SearchQuery::text`'s signature).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // A worktree-agnostic glob (and the flag set to `false`) matches files in
    // every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
5684
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Project layout: `target/` and `node_modules/` are gitignored (see the
    // `.gitignore` contents), while the top-level `package.json` is tracked.
    // Every file contains the query substring "key" somewhere.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: ignored entries are skipped, so only the tracked
    // top-level `package.json` matches. Match ranges are byte offsets.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false, // whole-word flag — presumably; confirm against SearchQuery::text
                false, // case-sensitive — see test_search_with_unicode
                false, // include ignored entries — flipped to `true` below
                Default::default(), // files to include (match everything)
                Default::default(), // files to exclude (nothing)
                false, // NOTE(review): presumably "match full paths"; verify against SearchQuery::text
                None,  // NOTE(review): presumably restricts search to given buffers; verify
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project; same query but with ignored entries included.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true, // include ignored entries
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Combine include/exclude filters with ignored entries enabled: only the
    // prettier directory is searched, and its `.ts` file is excluded.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5807
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three files containing Cyrillic text. "привет" is 12 bytes in UTF-8
    // (2 bytes per character), so the expected match ranges below are byte
    // offsets, not character counts.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Case-sensitive search stays a plain-text query (asserted below) and
    // only matches the lowercase occurrences.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false, // whole word
        true,  // case-sensitive
        false, // include ignored
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive Unicode search is promoted to a regex query (asserted
    // below) and matches both cases.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false, // case-insensitive
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Even though the case-insensitive path goes through regex, the `.` in
    // the query must be treated literally: only `two.rs` contains "ПРИВЕТ.".
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
5891
5892#[gpui::test]
5893async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5894 init_test(cx);
5895
5896 let fs = FakeFs::new(cx.executor().clone());
5897 fs.insert_tree(
5898 "/one/two",
5899 json!({
5900 "three": {
5901 "a.txt": "",
5902 "four": {}
5903 },
5904 "c.rs": ""
5905 }),
5906 )
5907 .await;
5908
5909 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5910 project
5911 .update(cx, |project, cx| {
5912 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5913 project.create_entry((id, "b.."), true, cx)
5914 })
5915 .await
5916 .unwrap()
5917 .to_included()
5918 .unwrap();
5919
5920 // Can't create paths outside the project
5921 let result = project
5922 .update(cx, |project, cx| {
5923 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5924 project.create_entry((id, "../../boop"), true, cx)
5925 })
5926 .await;
5927 assert!(result.is_err());
5928
5929 // Can't create paths with '..'
5930 let result = project
5931 .update(cx, |project, cx| {
5932 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5933 project.create_entry((id, "four/../beep"), true, cx)
5934 })
5935 .await;
5936 assert!(result.is_err());
5937
5938 assert_eq!(
5939 fs.paths(true),
5940 vec![
5941 PathBuf::from(path!("/")),
5942 PathBuf::from(path!("/one")),
5943 PathBuf::from(path!("/one/two")),
5944 PathBuf::from(path!("/one/two/c.rs")),
5945 PathBuf::from(path!("/one/two/three")),
5946 PathBuf::from(path!("/one/two/three/a.txt")),
5947 PathBuf::from(path!("/one/two/three/b..")),
5948 PathBuf::from(path!("/one/two/three/four")),
5949 ]
5950 );
5951
5952 // And we cannot open buffers with '..'
5953 let result = project
5954 .update(cx, |project, cx| {
5955 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5956 project.open_buffer((id, "../c.rs"), cx)
5957 })
5958 .await;
5959 assert!(result.is_err())
5960}
5961
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same language: two that will produce hover
    // contents, one that answers with `None`, and one that advertises no
    // hover capability at all.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    // No hover support: this server must never be queried.
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts the registered servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each server as it comes up, keyed by server
    // name so we can later await each handler having fired exactly once.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two reply with a distinctive hover string.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // Has hover capability but nothing to say.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // Advertises no hover capability, so this handler must never run.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Trigger one hover request, then wait until every capable server has
    // actually been asked.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned contents contribute results.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
6115
6116#[gpui::test]
6117async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
6118 init_test(cx);
6119
6120 let fs = FakeFs::new(cx.executor());
6121 fs.insert_tree(
6122 path!("/dir"),
6123 json!({
6124 "a.ts": "a",
6125 }),
6126 )
6127 .await;
6128
6129 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6130
6131 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6132 language_registry.add(typescript_lang());
6133 let mut fake_language_servers = language_registry.register_fake_lsp(
6134 "TypeScript",
6135 FakeLspAdapter {
6136 capabilities: lsp::ServerCapabilities {
6137 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6138 ..lsp::ServerCapabilities::default()
6139 },
6140 ..FakeLspAdapter::default()
6141 },
6142 );
6143
6144 let (buffer, _handle) = project
6145 .update(cx, |p, cx| {
6146 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6147 })
6148 .await
6149 .unwrap();
6150 cx.executor().run_until_parked();
6151
6152 let fake_server = fake_language_servers
6153 .next()
6154 .await
6155 .expect("failed to get the language server");
6156
6157 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6158 move |_, _| async move {
6159 Ok(Some(lsp::Hover {
6160 contents: lsp::HoverContents::Array(vec![
6161 lsp::MarkedString::String("".to_string()),
6162 lsp::MarkedString::String(" ".to_string()),
6163 lsp::MarkedString::String("\n\n\n".to_string()),
6164 ]),
6165 range: None,
6166 }))
6167 },
6168 );
6169
6170 let hover_task = project.update(cx, |project, cx| {
6171 project.hover(&buffer, Point::new(0, 0), cx)
6172 });
6173 let () = request_handled
6174 .next()
6175 .await
6176 .expect("All hover requests should have been triggered");
6177 assert_eq!(
6178 Vec::<String>::new(),
6179 hover_task
6180 .await
6181 .into_iter()
6182 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6183 .sorted()
6184 .collect::<Vec<_>>(),
6185 "Empty hover parts should be ignored"
6186 );
6187}
6188
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server always offers two actions with different kinds; the request
    // below asks for only one of those kinds.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request actions over the whole buffer, restricted to "organize imports".
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the action matching the requested kind survives the filter.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6267
6268#[gpui::test]
6269async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6270 init_test(cx);
6271
6272 let fs = FakeFs::new(cx.executor());
6273 fs.insert_tree(
6274 path!("/dir"),
6275 json!({
6276 "a.tsx": "a",
6277 }),
6278 )
6279 .await;
6280
6281 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6282
6283 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6284 language_registry.add(tsx_lang());
6285 let language_server_names = [
6286 "TypeScriptServer",
6287 "TailwindServer",
6288 "ESLintServer",
6289 "NoActionsCapabilitiesServer",
6290 ];
6291
6292 let mut language_server_rxs = [
6293 language_registry.register_fake_lsp(
6294 "tsx",
6295 FakeLspAdapter {
6296 name: language_server_names[0],
6297 capabilities: lsp::ServerCapabilities {
6298 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6299 ..lsp::ServerCapabilities::default()
6300 },
6301 ..FakeLspAdapter::default()
6302 },
6303 ),
6304 language_registry.register_fake_lsp(
6305 "tsx",
6306 FakeLspAdapter {
6307 name: language_server_names[1],
6308 capabilities: lsp::ServerCapabilities {
6309 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6310 ..lsp::ServerCapabilities::default()
6311 },
6312 ..FakeLspAdapter::default()
6313 },
6314 ),
6315 language_registry.register_fake_lsp(
6316 "tsx",
6317 FakeLspAdapter {
6318 name: language_server_names[2],
6319 capabilities: lsp::ServerCapabilities {
6320 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6321 ..lsp::ServerCapabilities::default()
6322 },
6323 ..FakeLspAdapter::default()
6324 },
6325 ),
6326 language_registry.register_fake_lsp(
6327 "tsx",
6328 FakeLspAdapter {
6329 name: language_server_names[3],
6330 capabilities: lsp::ServerCapabilities {
6331 code_action_provider: None,
6332 ..lsp::ServerCapabilities::default()
6333 },
6334 ..FakeLspAdapter::default()
6335 },
6336 ),
6337 ];
6338
6339 let (buffer, _handle) = project
6340 .update(cx, |p, cx| {
6341 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6342 })
6343 .await
6344 .unwrap();
6345 cx.executor().run_until_parked();
6346
6347 let mut servers_with_actions_requests = HashMap::default();
6348 for i in 0..language_server_names.len() {
6349 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6350 panic!(
6351 "Failed to get language server #{i} with name {}",
6352 &language_server_names[i]
6353 )
6354 });
6355 let new_server_name = new_server.server.name();
6356
6357 assert!(
6358 !servers_with_actions_requests.contains_key(&new_server_name),
6359 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6360 );
6361 match new_server_name.0.as_ref() {
6362 "TailwindServer" | "TypeScriptServer" => {
6363 servers_with_actions_requests.insert(
6364 new_server_name.clone(),
6365 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6366 move |_, _| {
6367 let name = new_server_name.clone();
6368 async move {
6369 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6370 lsp::CodeAction {
6371 title: format!("{name} code action"),
6372 ..lsp::CodeAction::default()
6373 },
6374 )]))
6375 }
6376 },
6377 ),
6378 );
6379 }
6380 "ESLintServer" => {
6381 servers_with_actions_requests.insert(
6382 new_server_name,
6383 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6384 |_, _| async move { Ok(None) },
6385 ),
6386 );
6387 }
6388 "NoActionsCapabilitiesServer" => {
6389 let _never_handled = new_server
6390 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6391 panic!(
6392 "Should not call for code actions server with no corresponding capabilities"
6393 )
6394 });
6395 }
6396 unexpected => panic!("Unexpected server name: {unexpected}"),
6397 }
6398 }
6399
6400 let code_actions_task = project.update(cx, |project, cx| {
6401 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6402 });
6403
6404 // cx.run_until_parked();
6405 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6406 |mut code_actions_request| async move {
6407 code_actions_request
6408 .next()
6409 .await
6410 .expect("All code actions requests should have been triggered")
6411 },
6412 ))
6413 .await;
6414 assert_eq!(
6415 vec!["TailwindServer code action", "TypeScriptServer code action"],
6416 code_actions_task
6417 .await
6418 .unwrap()
6419 .into_iter()
6420 .map(|code_action| code_action.lsp_action.title().to_owned())
6421 .sorted()
6422 .collect::<Vec<_>>(),
6423 "Should receive code actions responses from all related servers with hover capabilities"
6424 );
6425}
6426
6427#[gpui::test]
6428async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6429 init_test(cx);
6430
6431 let fs = FakeFs::new(cx.executor());
6432 fs.insert_tree(
6433 "/dir",
6434 json!({
6435 "a.rs": "let a = 1;",
6436 "b.rs": "let b = 2;",
6437 "c.rs": "let c = 2;",
6438 }),
6439 )
6440 .await;
6441
6442 let project = Project::test(
6443 fs,
6444 [
6445 "/dir/a.rs".as_ref(),
6446 "/dir/b.rs".as_ref(),
6447 "/dir/c.rs".as_ref(),
6448 ],
6449 cx,
6450 )
6451 .await;
6452
6453 // check the initial state and get the worktrees
6454 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6455 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6456 assert_eq!(worktrees.len(), 3);
6457
6458 let worktree_a = worktrees[0].read(cx);
6459 let worktree_b = worktrees[1].read(cx);
6460 let worktree_c = worktrees[2].read(cx);
6461
6462 // check they start in the right order
6463 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6464 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6465 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6466
6467 (
6468 worktrees[0].clone(),
6469 worktrees[1].clone(),
6470 worktrees[2].clone(),
6471 )
6472 });
6473
6474 // move first worktree to after the second
6475 // [a, b, c] -> [b, a, c]
6476 project
6477 .update(cx, |project, cx| {
6478 let first = worktree_a.read(cx);
6479 let second = worktree_b.read(cx);
6480 project.move_worktree(first.id(), second.id(), cx)
6481 })
6482 .expect("moving first after second");
6483
6484 // check the state after moving
6485 project.update(cx, |project, cx| {
6486 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6487 assert_eq!(worktrees.len(), 3);
6488
6489 let first = worktrees[0].read(cx);
6490 let second = worktrees[1].read(cx);
6491 let third = worktrees[2].read(cx);
6492
6493 // check they are now in the right order
6494 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6495 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6496 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6497 });
6498
6499 // move the second worktree to before the first
6500 // [b, a, c] -> [a, b, c]
6501 project
6502 .update(cx, |project, cx| {
6503 let second = worktree_a.read(cx);
6504 let first = worktree_b.read(cx);
6505 project.move_worktree(first.id(), second.id(), cx)
6506 })
6507 .expect("moving second before first");
6508
6509 // check the state after moving
6510 project.update(cx, |project, cx| {
6511 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6512 assert_eq!(worktrees.len(), 3);
6513
6514 let first = worktrees[0].read(cx);
6515 let second = worktrees[1].read(cx);
6516 let third = worktrees[2].read(cx);
6517
6518 // check they are now in the right order
6519 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6520 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6521 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6522 });
6523
6524 // move the second worktree to after the third
6525 // [a, b, c] -> [a, c, b]
6526 project
6527 .update(cx, |project, cx| {
6528 let second = worktree_b.read(cx);
6529 let third = worktree_c.read(cx);
6530 project.move_worktree(second.id(), third.id(), cx)
6531 })
6532 .expect("moving second after third");
6533
6534 // check the state after moving
6535 project.update(cx, |project, cx| {
6536 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6537 assert_eq!(worktrees.len(), 3);
6538
6539 let first = worktrees[0].read(cx);
6540 let second = worktrees[1].read(cx);
6541 let third = worktrees[2].read(cx);
6542
6543 // check they are now in the right order
6544 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6545 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6546 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6547 });
6548
6549 // move the third worktree to before the second
6550 // [a, c, b] -> [a, b, c]
6551 project
6552 .update(cx, |project, cx| {
6553 let third = worktree_c.read(cx);
6554 let second = worktree_b.read(cx);
6555 project.move_worktree(third.id(), second.id(), cx)
6556 })
6557 .expect("moving third before second");
6558
6559 // check the state after moving
6560 project.update(cx, |project, cx| {
6561 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6562 assert_eq!(worktrees.len(), 3);
6563
6564 let first = worktrees[0].read(cx);
6565 let second = worktrees[1].read(cx);
6566 let third = worktrees[2].read(cx);
6567
6568 // check they are now in the right order
6569 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6570 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6571 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6572 });
6573
6574 // move the first worktree to after the third
6575 // [a, b, c] -> [b, c, a]
6576 project
6577 .update(cx, |project, cx| {
6578 let first = worktree_a.read(cx);
6579 let third = worktree_c.read(cx);
6580 project.move_worktree(first.id(), third.id(), cx)
6581 })
6582 .expect("moving first after third");
6583
6584 // check the state after moving
6585 project.update(cx, |project, cx| {
6586 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6587 assert_eq!(worktrees.len(), 3);
6588
6589 let first = worktrees[0].read(cx);
6590 let second = worktrees[1].read(cx);
6591 let third = worktrees[2].read(cx);
6592
6593 // check they are now in the right order
6594 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6595 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6596 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6597 });
6598
6599 // move the third worktree to before the first
6600 // [b, c, a] -> [a, b, c]
6601 project
6602 .update(cx, |project, cx| {
6603 let third = worktree_a.read(cx);
6604 let first = worktree_b.read(cx);
6605 project.move_worktree(third.id(), first.id(), cx)
6606 })
6607 .expect("moving third before first");
6608
6609 // check the state after moving
6610 project.update(cx, |project, cx| {
6611 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6612 assert_eq!(worktrees.len(), 3);
6613
6614 let first = worktrees[0].read(cx);
6615 let second = worktrees[1].read(cx);
6616 let third = worktrees[2].read(cx);
6617
6618 // check they are now in the right order
6619 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6620 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6621 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6622 });
6623}
6624
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index (staged) version of the file: no leading comment, "hello world".
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: adds a comment line and changes the greeting.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    // The unstaged diff compares the buffer against the index contents.
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    // Expected hunk tuples: (buffer row range, base/index text, buffer text, status).
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Restage: the new index contents include the comment but drop the
    // println! line entirely.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    cx.run_until_parked();
    // The diff recomputes against the new index: only the println! line is
    // now unstaged (an addition relative to the index).
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6722
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: HEAD, index, and working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index stages the greeting change but not the comment line.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index both also contain `deletion.rs`, which is absent from
    // the working tree — i.e. an unstaged deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    // The uncommitted diff compares the buffer against HEAD; the secondary
    // status of each hunk reflects whether it is also unstaged.
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text picks up the buffer's language (for highlighting).
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // Expected hunk tuples: (buffer row range, base/HEAD text, buffer text, status).
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    // The comment line is not in the index -> has a secondary
                    // (unstaged) hunk.
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    // The greeting change is staged -> no secondary hunk.
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a single deletion hunk; the deletion is not yet
    // staged, so a secondary hunk is present.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion remains in the uncommitted diff, but its secondary
    // (unstaged) hunk is gone now that the index agrees.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6902
/// Exercises staging individual hunks through an uncommitted diff:
/// hunks show an optimistic "pending" secondary status while the git index
/// write is in flight, settle once the write lands, revert when the write
/// fails, and multiple staging operations can be issued back to back.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index both start with this text; the working copy deletes
    // "zero" and rewrites "two" and "four", producing three unstaged hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Collect diff events so we can assert on the exact event sequence below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The staged hunk transitions to SecondaryHunkRemovalPending before
        // the index write completes.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The optimistic pending state is shown even though the write will fail.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7242
/// Verifies that hunk staging stays consistent when filesystem events for
/// index writes arrive late: events are paused, hunks are staged while
/// earlier writes' events are still buffered, and the final state must show
/// all hunks staged once everything is flushed.
///
/// The seeds are pinned to values that previously reproduced races between
/// the staged-state bookkeeping and delayed FS events.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as test_staging_hunks: delete "zero", modify "two"/"four".
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Only the first hunk shows as pending; its FS event is buffered.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks remain pending while events are paused.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7436
/// Randomized test: repeatedly stages/unstages random hunks (with random
/// yields in between, and sometimes a deprioritized diff-recalculation task
/// to provoke races against index writes), then checks that once everything
/// settles, each hunk's secondary status matches the last operation applied
/// to it.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.gen_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every 5th line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as our model of the expected secondary status of each
    // hunk; it is updated alongside every stage/unstage operation.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.gen_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the chosen hunk: stage it if it's unstaged, and vice versa,
        // recording the optimistic pending state in our model.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Randomly yield so operations can interleave with background work.
        for _ in 0..rng.gen_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once quiescent, every pending state should have resolved.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7555
7556#[gpui::test]
7557async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7558 init_test(cx);
7559
7560 let committed_contents = r#"
7561 fn main() {
7562 println!("hello from HEAD");
7563 }
7564 "#
7565 .unindent();
7566 let file_contents = r#"
7567 fn main() {
7568 println!("hello from the working copy");
7569 }
7570 "#
7571 .unindent();
7572
7573 let fs = FakeFs::new(cx.background_executor.clone());
7574 fs.insert_tree(
7575 "/dir",
7576 json!({
7577 ".git": {},
7578 "src": {
7579 "main.rs": file_contents,
7580 }
7581 }),
7582 )
7583 .await;
7584
7585 fs.set_head_for_repo(
7586 Path::new("/dir/.git"),
7587 &[("src/main.rs".into(), committed_contents.clone())],
7588 "deadbeef",
7589 );
7590 fs.set_index_for_repo(
7591 Path::new("/dir/.git"),
7592 &[("src/main.rs".into(), committed_contents.clone())],
7593 );
7594
7595 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7596
7597 let buffer = project
7598 .update(cx, |project, cx| {
7599 project.open_local_buffer("/dir/src/main.rs", cx)
7600 })
7601 .await
7602 .unwrap();
7603 let uncommitted_diff = project
7604 .update(cx, |project, cx| {
7605 project.open_uncommitted_diff(buffer.clone(), cx)
7606 })
7607 .await
7608 .unwrap();
7609
7610 cx.run_until_parked();
7611 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7612 let snapshot = buffer.read(cx).snapshot();
7613 assert_hunks(
7614 uncommitted_diff.hunks(&snapshot, cx),
7615 &snapshot,
7616 &uncommitted_diff.base_text_string().unwrap(),
7617 &[(
7618 1..2,
7619 " println!(\"hello from HEAD\");\n",
7620 " println!(\"hello from the working copy\");\n",
7621 DiffHunkStatus {
7622 kind: DiffHunkStatusKind::Modified,
7623 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7624 },
7625 )],
7626 );
7627 });
7628}
7629
7630#[gpui::test]
7631async fn test_repository_and_path_for_project_path(
7632 background_executor: BackgroundExecutor,
7633 cx: &mut gpui::TestAppContext,
7634) {
7635 init_test(cx);
7636 let fs = FakeFs::new(background_executor);
7637 fs.insert_tree(
7638 path!("/root"),
7639 json!({
7640 "c.txt": "",
7641 "dir1": {
7642 ".git": {},
7643 "deps": {
7644 "dep1": {
7645 ".git": {},
7646 "src": {
7647 "a.txt": ""
7648 }
7649 }
7650 },
7651 "src": {
7652 "b.txt": ""
7653 }
7654 },
7655 }),
7656 )
7657 .await;
7658
7659 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7660 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7661 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7662 project
7663 .update(cx, |project, cx| project.git_scans_complete(cx))
7664 .await;
7665 cx.run_until_parked();
7666
7667 project.read_with(cx, |project, cx| {
7668 let git_store = project.git_store().read(cx);
7669 let pairs = [
7670 ("c.txt", None),
7671 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
7672 (
7673 "dir1/deps/dep1/src/a.txt",
7674 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
7675 ),
7676 ];
7677 let expected = pairs
7678 .iter()
7679 .map(|(path, result)| {
7680 (
7681 path,
7682 result.map(|(repo, repo_path)| {
7683 (Path::new(repo).into(), RepoPath::from(repo_path))
7684 }),
7685 )
7686 })
7687 .collect::<Vec<_>>();
7688 let actual = pairs
7689 .iter()
7690 .map(|(path, _)| {
7691 let project_path = (tree_id, Path::new(path)).into();
7692 let result = maybe!({
7693 let (repo, repo_path) =
7694 git_store.repository_and_path_for_project_path(&project_path, cx)?;
7695 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
7696 });
7697 (path, result)
7698 })
7699 .collect::<Vec<_>>();
7700 pretty_assertions::assert_eq!(expected, actual);
7701 });
7702
7703 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
7704 .await
7705 .unwrap();
7706 cx.run_until_parked();
7707
7708 project.read_with(cx, |project, cx| {
7709 let git_store = project.git_store().read(cx);
7710 assert_eq!(
7711 git_store.repository_and_path_for_project_path(
7712 &(tree_id, Path::new("dir1/src/b.txt")).into(),
7713 cx
7714 ),
7715 None
7716 );
7717 });
7718}
7719
7720#[gpui::test]
7721async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7722 init_test(cx);
7723 let fs = FakeFs::new(cx.background_executor.clone());
7724 fs.insert_tree(
7725 path!("/root"),
7726 json!({
7727 "home": {
7728 ".git": {},
7729 "project": {
7730 "a.txt": "A"
7731 },
7732 },
7733 }),
7734 )
7735 .await;
7736 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7737
7738 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7739 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7740 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7741
7742 project
7743 .update(cx, |project, cx| project.git_scans_complete(cx))
7744 .await;
7745 tree.flush_fs_events(cx).await;
7746
7747 project.read_with(cx, |project, cx| {
7748 let containing = project
7749 .git_store()
7750 .read(cx)
7751 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7752 assert!(containing.is_none());
7753 });
7754
7755 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7756 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7757 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7758 project
7759 .update(cx, |project, cx| project.git_scans_complete(cx))
7760 .await;
7761 tree.flush_fs_events(cx).await;
7762
7763 project.read_with(cx, |project, cx| {
7764 let containing = project
7765 .git_store()
7766 .read(cx)
7767 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7768 assert_eq!(
7769 containing
7770 .unwrap()
7771 .0
7772 .read(cx)
7773 .work_directory_abs_path
7774 .as_ref(),
7775 Path::new(path!("/root/home"))
7776 );
7777 });
7778}
7779
/// Verifies git status reporting against a real repository on the real
/// filesystem, through several phases: initial state, further working-copy
/// edits, a commit that resets statuses, and deletions of tracked vs
/// untracked files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; its status should appear.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modifications and the deletion, clearing those statuses.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked and one untracked file from the working copy.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7909
7910#[gpui::test]
7911async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
7912 init_test(cx);
7913 cx.executor().allow_parking();
7914
7915 let root = TempTree::new(json!({
7916 "project": {
7917 "sub": {},
7918 "a.txt": "",
7919 },
7920 }));
7921
7922 let work_dir = root.path().join("project");
7923 let repo = git_init(work_dir.as_path());
7924 // a.txt exists in HEAD and the working copy but is deleted in the index.
7925 git_add("a.txt", &repo);
7926 git_commit("Initial commit", &repo);
7927 git_remove_index("a.txt".as_ref(), &repo);
7928 // `sub` is a nested git repository.
7929 let _sub = git_init(&work_dir.join("sub"));
7930
7931 let project = Project::test(
7932 Arc::new(RealFs::new(None, cx.executor())),
7933 [root.path()],
7934 cx,
7935 )
7936 .await;
7937
7938 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7939 tree.flush_fs_events(cx).await;
7940 project
7941 .update(cx, |project, cx| project.git_scans_complete(cx))
7942 .await;
7943 cx.executor().run_until_parked();
7944
7945 let repository = project.read_with(cx, |project, cx| {
7946 project
7947 .repositories(cx)
7948 .values()
7949 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
7950 .unwrap()
7951 .clone()
7952 });
7953
7954 repository.read_with(cx, |repository, _cx| {
7955 let entries = repository.cached_status().collect::<Vec<_>>();
7956
7957 // `sub` doesn't appear in our computed statuses.
7958 // a.txt appears with a combined `DA` status.
7959 assert_eq!(
7960 entries,
7961 [StatusEntry {
7962 repo_path: "a.txt".into(),
7963 status: TrackedStatus {
7964 index_status: StatusCode::Deleted,
7965 worktree_status: StatusCode::Added
7966 }
7967 .into(),
7968 }]
7969 )
7970 });
7971}
7972
7973#[gpui::test]
7974async fn test_repository_subfolder_git_status(
7975 executor: gpui::BackgroundExecutor,
7976 cx: &mut gpui::TestAppContext,
7977) {
7978 init_test(cx);
7979
7980 let fs = FakeFs::new(executor);
7981 fs.insert_tree(
7982 path!("/root"),
7983 json!({
7984 "my-repo": {
7985 ".git": {},
7986 "a.txt": "a",
7987 "sub-folder-1": {
7988 "sub-folder-2": {
7989 "c.txt": "cc",
7990 "d": {
7991 "e.txt": "eee"
7992 }
7993 },
7994 }
7995 },
7996 }),
7997 )
7998 .await;
7999
8000 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
8001 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
8002
8003 fs.set_status_for_repo(
8004 path!("/root/my-repo/.git").as_ref(),
8005 &[(E_TXT.as_ref(), FileStatus::Untracked)],
8006 );
8007
8008 let project = Project::test(
8009 fs.clone(),
8010 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
8011 cx,
8012 )
8013 .await;
8014
8015 project
8016 .update(cx, |project, cx| project.git_scans_complete(cx))
8017 .await;
8018 cx.run_until_parked();
8019
8020 let repository = project.read_with(cx, |project, cx| {
8021 project.repositories(cx).values().next().unwrap().clone()
8022 });
8023
8024 // Ensure that the git status is loaded correctly
8025 repository.read_with(cx, |repository, _cx| {
8026 assert_eq!(
8027 repository.work_directory_abs_path,
8028 Path::new(path!("/root/my-repo")).into()
8029 );
8030
8031 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
8032 assert_eq!(
8033 repository.status_for_path(&E_TXT.into()).unwrap().status,
8034 FileStatus::Untracked
8035 );
8036 });
8037
8038 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
8039 project
8040 .update(cx, |project, cx| project.git_scans_complete(cx))
8041 .await;
8042 cx.run_until_parked();
8043
8044 repository.read_with(cx, |repository, _cx| {
8045 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
8046 assert_eq!(repository.status_for_path(&E_TXT.into()), None);
8047 });
8048}
8049
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Disabled via `#[cfg(any())]` (an always-false cfg) until the flakiness is resolved.
//
// Scenario: a cherry-pick of a conflicting commit should surface the
// conflicted path in `merge_conflicts`, and finishing the cherry-pick
// (staging + committing + removing CHERRY_PICK_HEAD) should clear it.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a divergent commit on another branch, then cherry-pick it onto
    // main after making a conflicting change there.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // The conflicted cherry-pick must leave CHERRY_PICK_HEAD behind.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick is concluded, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8132
// Verifies that editing a `.gitignore` re-evaluates both ignore state and git
// status for the affected files.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // Initially only `.gitignore` and `a.xml` are tracked; `b.txt` is ignored.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(&tree.read(cx), &repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(&tree.read(cx), &repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    // The ignore flags should flip, and the staged file should read as Added.
    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(&tree.read(cx), &repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            &tree.read(cx),
            &repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8200
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
//
// Verifies that renaming a repository's work directory updates
// `work_directory_abs_path` while preserving the per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit `a`, then dirty it; `b` stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the work directory out from under the open repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should follow the rename, statuses intact.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8281
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of the Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
//
// End-to-end check that file statuses track a real git repository through
// edits, commits, resets, stashes, ignore-rule changes, and renames.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        // Committed files no longer report a status.
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // `a.txt` was stashed back to its committed state.
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        // `b.txt` was removed from the index, so it is untracked again.
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules to cover `f.txt`.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A brand-new file in a new nested directory should appear as untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming the parent directory should carry the untracked status along.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8483
// Verifies that repositories are only discovered for visible worktrees:
// adding an invisible (single-file) worktree must not register new repos.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Only `dep1` is opened as a visible worktree, so only its repo should load.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Open a single file from `dir1`, which creates an invisible worktree.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list must be unchanged: `dir1`'s repo stays unregistered.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8545
// Verifies ignore handling during rescans: files ignored by an ancestor
// `.gitignore` (outside the repo) or the repo's own `.gitignore` report no
// status, while newly created tracked files pick up their index state.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file-scan exclusions so ignored directories are still scanned.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            &tree.read(cx),
            &repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            &tree.read(cx),
            &repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            &tree.read(cx),
            &repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files in each category: staged, ancestor-ignored, and ignored.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            &tree.read(cx),
            &repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            &tree.read(cx),
            &repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            &tree.read(cx),
            &repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The `.git` directory itself is always treated as ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8681
// Verifies that linked git worktrees (`.git` file pointing into
// `.git/worktrees/...`) and submodules (`.git` file pointing into
// `.git/modules/...`) are each discovered as distinct repositories, and that
// git events in them refresh their statuses.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be found: the main one, the linked
    // worktree, and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, not the
    // containing project's.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
8831
// Verifies that two worktrees sharing one containing git repository result in
// a single deduplicated repository entry.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Both worktrees live inside the same repository at /root/project.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository should be registered.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
8878
8879async fn search(
8880 project: &Entity<Project>,
8881 query: SearchQuery,
8882 cx: &mut gpui::TestAppContext,
8883) -> Result<HashMap<String, Vec<Range<usize>>>> {
8884 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8885 let mut results = HashMap::default();
8886 while let Ok(search_result) = search_rx.recv().await {
8887 match search_result {
8888 SearchResult::Buffer { buffer, ranges } => {
8889 results.entry(buffer).or_insert(ranges);
8890 }
8891 SearchResult::LimitReached => {}
8892 }
8893 }
8894 Ok(results
8895 .into_iter()
8896 .map(|(buffer, ranges)| {
8897 buffer.update(cx, |buffer, cx| {
8898 let path = buffer
8899 .file()
8900 .unwrap()
8901 .full_path(cx)
8902 .to_string_lossy()
8903 .to_string();
8904 let ranges = ranges
8905 .into_iter()
8906 .map(|range| range.to_offset(buffer))
8907 .collect::<Vec<_>>();
8908 (path, ranges)
8909 })
8910 })
8911 .collect())
8912}
8913
/// Standard setup for tests in this file: logging, settings store, and the
/// globals that `Project` depends on.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        // NOTE(review): the settings store is installed first; the inits below
        // presumably read it during initialization — keep this ordering.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
8925
8926fn json_lang() -> Arc<Language> {
8927 Arc::new(Language::new(
8928 LanguageConfig {
8929 name: "JSON".into(),
8930 matcher: LanguageMatcher {
8931 path_suffixes: vec!["json".to_string()],
8932 ..Default::default()
8933 },
8934 ..Default::default()
8935 },
8936 None,
8937 ))
8938}
8939
8940fn js_lang() -> Arc<Language> {
8941 Arc::new(Language::new(
8942 LanguageConfig {
8943 name: "JavaScript".into(),
8944 matcher: LanguageMatcher {
8945 path_suffixes: vec!["js".to_string()],
8946 ..Default::default()
8947 },
8948 ..Default::default()
8949 },
8950 None,
8951 ))
8952}
8953
8954fn rust_lang() -> Arc<Language> {
8955 Arc::new(Language::new(
8956 LanguageConfig {
8957 name: "Rust".into(),
8958 matcher: LanguageMatcher {
8959 path_suffixes: vec!["rs".to_string()],
8960 ..Default::default()
8961 },
8962 ..Default::default()
8963 },
8964 Some(tree_sitter_rust::LANGUAGE.into()),
8965 ))
8966}
8967
8968fn typescript_lang() -> Arc<Language> {
8969 Arc::new(Language::new(
8970 LanguageConfig {
8971 name: "TypeScript".into(),
8972 matcher: LanguageMatcher {
8973 path_suffixes: vec!["ts".to_string()],
8974 ..Default::default()
8975 },
8976 ..Default::default()
8977 },
8978 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8979 ))
8980}
8981
8982fn tsx_lang() -> Arc<Language> {
8983 Arc::new(Language::new(
8984 LanguageConfig {
8985 name: "tsx".into(),
8986 matcher: LanguageMatcher {
8987 path_suffixes: vec!["tsx".to_string()],
8988 ..Default::default()
8989 },
8990 ..Default::default()
8991 },
8992 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8993 ))
8994}
8995
8996fn get_all_tasks(
8997 project: &Entity<Project>,
8998 task_contexts: Arc<TaskContexts>,
8999 cx: &mut App,
9000) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9001 let new_tasks = project.update(cx, |project, cx| {
9002 project.task_store.update(cx, |task_store, cx| {
9003 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9004 this.used_and_current_resolved_tasks(task_contexts, cx)
9005 })
9006 })
9007 });
9008
9009 cx.background_spawn(async move {
9010 let (mut old, new) = new_tasks.await;
9011 old.extend(new);
9012 old
9013 })
9014}
9015
9016#[track_caller]
9017fn assert_entry_git_state(
9018 tree: &Worktree,
9019 repository: &Repository,
9020 path: &str,
9021 index_status: Option<StatusCode>,
9022 is_ignored: bool,
9023) {
9024 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9025 let entry = tree
9026 .entry_for_path(path)
9027 .unwrap_or_else(|| panic!("entry {path} not found"));
9028 let status = repository
9029 .status_for_path(&path.into())
9030 .map(|entry| entry.status);
9031 let expected = index_status.map(|index_status| {
9032 TrackedStatus {
9033 index_status,
9034 worktree_status: StatusCode::Unmodified,
9035 }
9036 .into()
9037 });
9038 assert_eq!(
9039 status, expected,
9040 "expected {path} to have git status: {expected:?}"
9041 );
9042 assert_eq!(
9043 entry.is_ignored, is_ignored,
9044 "expected {path} to have is_ignored: {is_ignored}"
9045 );
9046}
9047
9048#[track_caller]
9049fn git_init(path: &Path) -> git2::Repository {
9050 let mut init_opts = RepositoryInitOptions::new();
9051 init_opts.initial_head("main");
9052 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9053}
9054
9055#[track_caller]
9056fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9057 let path = path.as_ref();
9058 let mut index = repo.index().expect("Failed to get index");
9059 index.add_path(path).expect("Failed to add file");
9060 index.write().expect("Failed to write index");
9061}
9062
9063#[track_caller]
9064fn git_remove_index(path: &Path, repo: &git2::Repository) {
9065 let mut index = repo.index().expect("Failed to get index");
9066 index.remove_path(path).expect("Failed to add file");
9067 index.write().expect("Failed to write index");
9068}
9069
9070#[track_caller]
9071fn git_commit(msg: &'static str, repo: &git2::Repository) {
9072 use git2::Signature;
9073
9074 let signature = Signature::now("test", "test@zed.dev").unwrap();
9075 let oid = repo.index().unwrap().write_tree().unwrap();
9076 let tree = repo.find_tree(oid).unwrap();
9077 if let Ok(head) = repo.head() {
9078 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9079
9080 let parent_commit = parent_obj.as_commit().unwrap();
9081
9082 repo.commit(
9083 Some("HEAD"),
9084 &signature,
9085 &signature,
9086 msg,
9087 &tree,
9088 &[parent_commit],
9089 )
9090 .expect("Failed to commit with parent");
9091 } else {
9092 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9093 .expect("Failed to commit");
9094 }
9095}
9096
/// Cherry-picks `commit` onto the current HEAD, leaving any conflicts in the
/// working tree (no commit is created).
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    let result = repo.cherrypick(commit, None);
    result.expect("Failed to cherrypick");
}
9102
9103#[track_caller]
9104fn git_stash(repo: &mut git2::Repository) {
9105 use git2::Signature;
9106
9107 let signature = Signature::now("test", "test@zed.dev").unwrap();
9108 repo.stash_save(&signature, "N/A", None)
9109 .expect("Failed to stash");
9110}
9111
9112#[track_caller]
9113fn git_reset(offset: usize, repo: &git2::Repository) {
9114 let head = repo.head().expect("Couldn't get repo head");
9115 let object = head.peel(git2::ObjectType::Commit).unwrap();
9116 let commit = object.as_commit().unwrap();
9117 let new_head = commit
9118 .parents()
9119 .inspect(|parnet| {
9120 parnet.message();
9121 })
9122 .nth(offset)
9123 .expect("Not enough history");
9124 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9125 .expect("Could not reset");
9126}
9127
/// Creates branch `name` pointing at the current HEAD commit (does not check
/// it out).
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-pasted panic message: this creates a branch, not a commit.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9138
/// Points HEAD at the ref `name` and syncs the working tree to it.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    let set_head = repo.set_head(name);
    set_head.expect("Failed to set head");
    let checkout = repo.checkout_head(None);
    checkout.expect("Failed to check out head");
}
9145
/// Snapshots libgit2's status list into an owned map of path -> status flags.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    let mut map = collections::HashMap::default();
    for entry in statuses.iter() {
        map.insert(entry.path().unwrap().to_string(), entry.status());
    }
    map
}
9155
// Verifies that `Project::find_project_path` maps absolute paths to the
// correct worktree and worktree-relative path, including paths for files that
// do not (yet) exist inside a worktree, and rejects paths outside all
// worktrees.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // Nonexistent files inside a worktree still resolve to a project path.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        // NOTE(review): this literal is not wrapped in `path!()` like the
        // others; on Windows it is not a native absolute path, though the
        // "not found" assertion still holds — confirm intentional.
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}