1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
9 DiffHunkStatusKind, assert_hunks,
10};
11use fs::FakeFs;
12use futures::{StreamExt, future};
13use git::{
14 GitHostingProviderRegistry,
15 repository::RepoPath,
16 status::{StatusCode, TrackedStatus},
17};
18use git2::RepositoryInitOptions;
19use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
20use http_client::Url;
21use language::{
22 Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
23 LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
24 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
25 tree_sitter_rust, tree_sitter_typescript,
26};
27use lsp::{
28 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
29 WillRenameFiles, notification::DidRenameFiles,
30};
31use parking_lot::Mutex;
32use paths::{config_dir, tasks_file};
33use postage::stream::Stream as _;
34use pretty_assertions::{assert_eq, assert_matches};
35use rand::{Rng as _, rngs::StdRng};
36use serde_json::json;
37#[cfg(not(windows))]
38use std::os;
39use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
40use task::{ResolvedTask, TaskContext};
41use unindent::Unindent as _;
42use util::{
43 TryFutureExt as _, assert_set_eq, maybe, path,
44 paths::PathMatcher,
45 test::{TempTree, marked_text_offsets},
46 uri,
47};
48use worktree::WorktreeModelHandle as _;
49
50#[gpui::test]
51async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
52 cx.executor().allow_parking();
53
54 let (tx, mut rx) = futures::channel::mpsc::unbounded();
55 let _thread = std::thread::spawn(move || {
56 #[cfg(not(target_os = "windows"))]
57 std::fs::metadata("/tmp").unwrap();
58 #[cfg(target_os = "windows")]
59 std::fs::metadata("C:/Windows").unwrap();
60 std::thread::sleep(Duration::from_millis(1000));
61 tx.unbounded_send(1).unwrap();
62 });
63 rx.next().await.unwrap();
64}
65
66#[gpui::test]
67async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
68 cx.executor().allow_parking();
69
70 let io_task = smol::unblock(move || {
71 println!("sleeping on thread {:?}", std::thread::current().id());
72 std::thread::sleep(Duration::from_millis(10));
73 1
74 });
75
76 let task = cx.foreground_executor().spawn(async move {
77 io_task.await;
78 });
79
80 task.await;
81}
82
83#[cfg(not(windows))]
84#[gpui::test]
85async fn test_symlinks(cx: &mut gpui::TestAppContext) {
86 init_test(cx);
87 cx.executor().allow_parking();
88
89 let dir = TempTree::new(json!({
90 "root": {
91 "apple": "",
92 "banana": {
93 "carrot": {
94 "date": "",
95 "endive": "",
96 }
97 },
98 "fennel": {
99 "grape": "",
100 }
101 }
102 }));
103
104 let root_link_path = dir.path().join("root_link");
105 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
106 os::unix::fs::symlink(
107 dir.path().join("root/fennel"),
108 dir.path().join("root/finnochio"),
109 )
110 .unwrap();
111
112 let project = Project::test(
113 Arc::new(RealFs::new(None, cx.executor())),
114 [root_link_path.as_ref()],
115 cx,
116 )
117 .await;
118
119 project.update(cx, |project, cx| {
120 let tree = project.worktrees(cx).next().unwrap().read(cx);
121 assert_eq!(tree.file_count(), 5);
122 assert_eq!(
123 tree.inode_for_path("fennel/grape"),
124 tree.inode_for_path("finnochio/grape")
125 );
126 });
127}
128
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies .editorconfig handling for language settings:
    // - .editorconfig values override .zed/settings.json,
    // - a nested .editorconfig overrides the one at the root,
    // - "tab_width" is used when "indent_size" is absent,
    // - files not matched by any glob keep the .zed/settings.json values.
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
            indent_style = tab
            indent_size = 3
            end_of_line = lf
            insert_final_newline = true
            trim_trailing_whitespace = true
        [*.js]
            tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
                indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    // Mirror the real temp tree into the fake FS so the project can read it.
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the fully-merged language settings for a worktree-relative
        // path, blocking on language detection for that file.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
218
219#[gpui::test]
220async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
221 init_test(cx);
222 cx.update(|cx| {
223 GitHostingProviderRegistry::default_global(cx);
224 git_hosting_providers::init(cx);
225 });
226
227 let fs = FakeFs::new(cx.executor());
228 let str_path = path!("/dir");
229 let path = Path::new(str_path);
230
231 fs.insert_tree(
232 path!("/dir"),
233 json!({
234 ".zed": {
235 "settings.json": r#"{
236 "git_hosting_providers": [
237 {
238 "provider": "gitlab",
239 "base_url": "https://google.com",
240 "name": "foo"
241 }
242 ]
243 }"#
244 },
245 }),
246 )
247 .await;
248
249 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
250 let (_worktree, _) =
251 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
252 cx.executor().run_until_parked();
253
254 cx.update(|cx| {
255 let provider = GitHostingProviderRegistry::global(cx);
256 assert!(
257 provider
258 .list_hosting_providers()
259 .into_iter()
260 .any(|provider| provider.name() == "foo")
261 );
262 });
263
264 fs.atomic_write(
265 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
266 "{}".into(),
267 )
268 .await
269 .unwrap();
270
271 cx.run_until_parked();
272
273 cx.update(|cx| {
274 let provider = GitHostingProviderRegistry::global(cx);
275 assert!(
276 !provider
277 .list_hosting_providers()
278 .into_iter()
279 .any(|provider| provider.name() == "foo")
280 );
281 });
282}
283
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies per-directory .zed/settings.json and .zed/tasks.json handling:
    // - nested settings override outer ones (tab_size 2 vs 8),
    // - tasks from nested and root .zed directories are both listed,
    // - after a task is scheduled and global file-based tasks are added,
    //   ordering becomes: most-recently-used, other worktree tasks, global.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task context with only an active worktree — no active item or selection.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for tasks defined in the worktree root's .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Check the settings overrides before resolving tasks: a/ inherits
            // the root tab_size, b/ overrides it with its own .zed settings.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    // Worktree-relative directory separator differs per OS.
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root-level task as most recently used, and add a global
    // (file-based) task via the task inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The scheduled task now sorts first; the global task is appended last.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
492
493#[gpui::test]
494async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
495 init_test(cx);
496 TaskStore::init(None);
497
498 let fs = FakeFs::new(cx.executor());
499 fs.insert_tree(
500 path!("/dir"),
501 json!({
502 ".zed": {
503 "tasks.json": r#"[{
504 "label": "test worktree root",
505 "command": "echo $ZED_WORKTREE_ROOT"
506 }]"#,
507 },
508 "a": {
509 "a.rs": "fn a() {\n A\n}"
510 },
511 }),
512 )
513 .await;
514
515 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
516 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
517
518 cx.executor().run_until_parked();
519 let worktree_id = cx.update(|cx| {
520 project.update(cx, |project, cx| {
521 project.worktrees(cx).next().unwrap().read(cx).id()
522 })
523 });
524
525 let active_non_worktree_item_tasks = cx
526 .update(|cx| {
527 get_all_tasks(
528 &project,
529 Arc::new(TaskContexts {
530 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
531 active_worktree_context: None,
532 other_worktree_contexts: Vec::new(),
533 lsp_task_sources: HashMap::default(),
534 latest_selection: None,
535 }),
536 cx,
537 )
538 })
539 .await;
540 assert!(
541 active_non_worktree_item_tasks.is_empty(),
542 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
543 );
544
545 let active_worktree_tasks = cx
546 .update(|cx| {
547 get_all_tasks(
548 &project,
549 Arc::new(TaskContexts {
550 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
551 active_worktree_context: Some((worktree_id, {
552 let mut worktree_context = TaskContext::default();
553 worktree_context
554 .task_variables
555 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
556 worktree_context
557 })),
558 other_worktree_contexts: Vec::new(),
559 lsp_task_sources: HashMap::default(),
560 latest_selection: None,
561 }),
562 cx,
563 )
564 })
565 .await;
566 assert_eq!(
567 active_worktree_tasks
568 .into_iter()
569 .map(|(source_kind, task)| {
570 let resolved = task.resolved;
571 (source_kind, resolved.command.unwrap())
572 })
573 .collect::<Vec<_>>(),
574 vec![(
575 TaskSourceKind::Worktree {
576 id: worktree_id,
577 directory_in_worktree: PathBuf::from(path!(".zed")),
578 id_base: if cfg!(windows) {
579 "local worktree tasks from directory \".zed\"".into()
580 } else {
581 "local worktree tasks from directory \".zed\"".into()
582 },
583 },
584 "echo /dir".to_string(),
585 )]
586 );
587}
588
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end check of language-server lifecycle management:
    // - servers start lazily when a buffer of their language opens,
    // - open/change/save/close notifications are routed per-language,
    // - renames (including extension changes) re-home buffers between servers,
    // - restarting servers reopens all relevant documents.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake servers with distinct completion triggers so we can tell which
    // server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed diagnostics on the buffer so we can verify below that they are
    // cleared when the buffer changes language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            HashSet::default(),
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the restarted
    // ones come up.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
991
992#[gpui::test]
993async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
994 init_test(cx);
995
996 let fs = FakeFs::new(cx.executor());
997 fs.insert_tree(
998 path!("/the-root"),
999 json!({
1000 ".gitignore": "target\n",
1001 "Cargo.lock": "",
1002 "src": {
1003 "a.rs": "",
1004 "b.rs": "",
1005 },
1006 "target": {
1007 "x": {
1008 "out": {
1009 "x.rs": ""
1010 }
1011 },
1012 "y": {
1013 "out": {
1014 "y.rs": "",
1015 }
1016 },
1017 "z": {
1018 "out": {
1019 "z.rs": ""
1020 }
1021 }
1022 }
1023 }),
1024 )
1025 .await;
1026 fs.insert_tree(
1027 path!("/the-registry"),
1028 json!({
1029 "dep1": {
1030 "src": {
1031 "dep1.rs": "",
1032 }
1033 },
1034 "dep2": {
1035 "src": {
1036 "dep2.rs": "",
1037 }
1038 },
1039 }),
1040 )
1041 .await;
1042 fs.insert_tree(
1043 path!("/the/stdlib"),
1044 json!({
1045 "LICENSE": "",
1046 "src": {
1047 "string.rs": "",
1048 }
1049 }),
1050 )
1051 .await;
1052
1053 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1054 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1055 (project.languages().clone(), project.lsp_store())
1056 });
1057 language_registry.add(rust_lang());
1058 let mut fake_servers = language_registry.register_fake_lsp(
1059 "Rust",
1060 FakeLspAdapter {
1061 name: "the-language-server",
1062 ..Default::default()
1063 },
1064 );
1065
1066 cx.executor().run_until_parked();
1067
1068 // Start the language server by opening a buffer with a compatible file extension.
1069 project
1070 .update(cx, |project, cx| {
1071 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1072 })
1073 .await
1074 .unwrap();
1075
1076 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1077 project.update(cx, |project, cx| {
1078 let worktree = project.worktrees(cx).next().unwrap();
1079 assert_eq!(
1080 worktree
1081 .read(cx)
1082 .snapshot()
1083 .entries(true, 0)
1084 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1085 .collect::<Vec<_>>(),
1086 &[
1087 (Path::new(""), false),
1088 (Path::new(".gitignore"), false),
1089 (Path::new("Cargo.lock"), false),
1090 (Path::new("src"), false),
1091 (Path::new("src/a.rs"), false),
1092 (Path::new("src/b.rs"), false),
1093 (Path::new("target"), true),
1094 ]
1095 );
1096 });
1097
1098 let prev_read_dir_count = fs.read_dir_call_count();
1099
1100 let fake_server = fake_servers.next().await.unwrap();
1101 let (server_id, server_name) = lsp_store.read_with(cx, |lsp_store, _| {
1102 let (id, status) = lsp_store.language_server_statuses().next().unwrap();
1103 (id, LanguageServerName::from(status.name.as_str()))
1104 });
1105
1106 // Simulate jumping to a definition in a dependency outside of the worktree.
1107 let _out_of_worktree_buffer = project
1108 .update(cx, |project, cx| {
1109 project.open_local_buffer_via_lsp(
1110 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1111 server_id,
1112 server_name.clone(),
1113 cx,
1114 )
1115 })
1116 .await
1117 .unwrap();
1118
1119 // Keep track of the FS events reported to the language server.
1120 let file_changes = Arc::new(Mutex::new(Vec::new()));
1121 fake_server
1122 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1123 registrations: vec![lsp::Registration {
1124 id: Default::default(),
1125 method: "workspace/didChangeWatchedFiles".to_string(),
1126 register_options: serde_json::to_value(
1127 lsp::DidChangeWatchedFilesRegistrationOptions {
1128 watchers: vec![
1129 lsp::FileSystemWatcher {
1130 glob_pattern: lsp::GlobPattern::String(
1131 path!("/the-root/Cargo.toml").to_string(),
1132 ),
1133 kind: None,
1134 },
1135 lsp::FileSystemWatcher {
1136 glob_pattern: lsp::GlobPattern::String(
1137 path!("/the-root/src/*.{rs,c}").to_string(),
1138 ),
1139 kind: None,
1140 },
1141 lsp::FileSystemWatcher {
1142 glob_pattern: lsp::GlobPattern::String(
1143 path!("/the-root/target/y/**/*.rs").to_string(),
1144 ),
1145 kind: None,
1146 },
1147 lsp::FileSystemWatcher {
1148 glob_pattern: lsp::GlobPattern::String(
1149 path!("/the/stdlib/src/**/*.rs").to_string(),
1150 ),
1151 kind: None,
1152 },
1153 lsp::FileSystemWatcher {
1154 glob_pattern: lsp::GlobPattern::String(
1155 path!("**/Cargo.lock").to_string(),
1156 ),
1157 kind: None,
1158 },
1159 ],
1160 },
1161 )
1162 .ok(),
1163 }],
1164 })
1165 .await
1166 .into_response()
1167 .unwrap();
1168 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1169 let file_changes = file_changes.clone();
1170 move |params, _| {
1171 let mut file_changes = file_changes.lock();
1172 file_changes.extend(params.changes);
1173 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1174 }
1175 });
1176
1177 cx.executor().run_until_parked();
1178 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1179 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1180
1181 let mut new_watched_paths = fs.watched_paths();
1182 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1183 assert_eq!(
1184 &new_watched_paths,
1185 &[
1186 Path::new(path!("/the-root")),
1187 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1188 Path::new(path!("/the/stdlib/src"))
1189 ]
1190 );
1191
1192 // Now the language server has asked us to watch an ignored directory path,
1193 // so we recursively load it.
1194 project.update(cx, |project, cx| {
1195 let worktree = project.visible_worktrees(cx).next().unwrap();
1196 assert_eq!(
1197 worktree
1198 .read(cx)
1199 .snapshot()
1200 .entries(true, 0)
1201 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1202 .collect::<Vec<_>>(),
1203 &[
1204 (Path::new(""), false),
1205 (Path::new(".gitignore"), false),
1206 (Path::new("Cargo.lock"), false),
1207 (Path::new("src"), false),
1208 (Path::new("src/a.rs"), false),
1209 (Path::new("src/b.rs"), false),
1210 (Path::new("target"), true),
1211 (Path::new("target/x"), true),
1212 (Path::new("target/y"), true),
1213 (Path::new("target/y/out"), true),
1214 (Path::new("target/y/out/y.rs"), true),
1215 (Path::new("target/z"), true),
1216 ]
1217 );
1218 });
1219
1220 // Perform some file system mutations, two of which match the watched patterns,
1221 // and one of which does not.
1222 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1223 .await
1224 .unwrap();
1225 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1226 .await
1227 .unwrap();
1228 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1229 .await
1230 .unwrap();
1231 fs.create_file(
1232 path!("/the-root/target/x/out/x2.rs").as_ref(),
1233 Default::default(),
1234 )
1235 .await
1236 .unwrap();
1237 fs.create_file(
1238 path!("/the-root/target/y/out/y2.rs").as_ref(),
1239 Default::default(),
1240 )
1241 .await
1242 .unwrap();
1243 fs.save(
1244 path!("/the-root/Cargo.lock").as_ref(),
1245 &"".into(),
1246 Default::default(),
1247 )
1248 .await
1249 .unwrap();
1250 fs.save(
1251 path!("/the-stdlib/LICENSE").as_ref(),
1252 &"".into(),
1253 Default::default(),
1254 )
1255 .await
1256 .unwrap();
1257 fs.save(
1258 path!("/the/stdlib/src/string.rs").as_ref(),
1259 &"".into(),
1260 Default::default(),
1261 )
1262 .await
1263 .unwrap();
1264
1265 // The language server receives events for the FS mutations that match its watch patterns.
1266 cx.executor().run_until_parked();
1267 assert_eq!(
1268 &*file_changes.lock(),
1269 &[
1270 lsp::FileEvent {
1271 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1272 typ: lsp::FileChangeType::CHANGED,
1273 },
1274 lsp::FileEvent {
1275 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1276 typ: lsp::FileChangeType::DELETED,
1277 },
1278 lsp::FileEvent {
1279 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1280 typ: lsp::FileChangeType::CREATED,
1281 },
1282 lsp::FileEvent {
1283 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1284 typ: lsp::FileChangeType::CREATED,
1285 },
1286 lsp::FileEvent {
1287 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1288 typ: lsp::FileChangeType::CHANGED,
1289 },
1290 ]
1291 );
1292}
1293
// Verifies that when each file is opened as its own single-file worktree,
// diagnostics published per-URI land in the correct buffer: `a.rs` shows its
// ERROR and `b.rs` shows its WARNING, with the surrounding text undecorated.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Each root passed to `Project::test` becomes a separate worktree, so the
    // project here holds two single-file worktrees.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Push one diagnostic per file from the same server id: an ERROR on the
    // `a` identifier in a.rs and a WARNING on the `b` identifier in b.rs.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
    });

    // a.rs: only the `a` chunk carries a severity, and it is the ERROR.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    // b.rs: only the `b` chunk carries a severity, and it is the WARNING.
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1399
1400#[gpui::test]
1401async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1402 init_test(cx);
1403
1404 let fs = FakeFs::new(cx.executor());
1405 fs.insert_tree(
1406 path!("/root"),
1407 json!({
1408 "dir": {
1409 ".git": {
1410 "HEAD": "ref: refs/heads/main",
1411 },
1412 ".gitignore": "b.rs",
1413 "a.rs": "let a = 1;",
1414 "b.rs": "let b = 2;",
1415 },
1416 "other.rs": "let b = c;"
1417 }),
1418 )
1419 .await;
1420
1421 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1422 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1423 let (worktree, _) = project
1424 .update(cx, |project, cx| {
1425 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1426 })
1427 .await
1428 .unwrap();
1429 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1430
1431 let (worktree, _) = project
1432 .update(cx, |project, cx| {
1433 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1434 })
1435 .await
1436 .unwrap();
1437 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1438
1439 let server_id = LanguageServerId(0);
1440 lsp_store.update(cx, |lsp_store, cx| {
1441 lsp_store
1442 .update_diagnostics(
1443 server_id,
1444 lsp::PublishDiagnosticsParams {
1445 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1446 version: None,
1447 diagnostics: vec![lsp::Diagnostic {
1448 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1449 severity: Some(lsp::DiagnosticSeverity::ERROR),
1450 message: "unused variable 'b'".to_string(),
1451 ..Default::default()
1452 }],
1453 },
1454 None,
1455 DiagnosticSourceKind::Pushed,
1456 &[],
1457 cx,
1458 )
1459 .unwrap();
1460 lsp_store
1461 .update_diagnostics(
1462 server_id,
1463 lsp::PublishDiagnosticsParams {
1464 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1465 version: None,
1466 diagnostics: vec![lsp::Diagnostic {
1467 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1468 severity: Some(lsp::DiagnosticSeverity::ERROR),
1469 message: "unknown variable 'c'".to_string(),
1470 ..Default::default()
1471 }],
1472 },
1473 None,
1474 DiagnosticSourceKind::Pushed,
1475 &[],
1476 cx,
1477 )
1478 .unwrap();
1479 });
1480
1481 let main_ignored_buffer = project
1482 .update(cx, |project, cx| {
1483 project.open_buffer((main_worktree_id, "b.rs"), cx)
1484 })
1485 .await
1486 .unwrap();
1487 main_ignored_buffer.update(cx, |buffer, _| {
1488 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1489 assert_eq!(
1490 chunks
1491 .iter()
1492 .map(|(s, d)| (s.as_str(), *d))
1493 .collect::<Vec<_>>(),
1494 &[
1495 ("let ", None),
1496 ("b", Some(DiagnosticSeverity::ERROR)),
1497 (" = 2;", None),
1498 ],
1499 "Gigitnored buffers should still get in-buffer diagnostics",
1500 );
1501 });
1502 let other_buffer = project
1503 .update(cx, |project, cx| {
1504 project.open_buffer((other_worktree_id, ""), cx)
1505 })
1506 .await
1507 .unwrap();
1508 other_buffer.update(cx, |buffer, _| {
1509 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1510 assert_eq!(
1511 chunks
1512 .iter()
1513 .map(|(s, d)| (s.as_str(), *d))
1514 .collect::<Vec<_>>(),
1515 &[
1516 ("let b = ", None),
1517 ("c", Some(DiagnosticSeverity::ERROR)),
1518 (";", None),
1519 ],
1520 "Buffers from hidden projects should still get in-buffer diagnostics"
1521 );
1522 });
1523
1524 project.update(cx, |project, cx| {
1525 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1526 assert_eq!(
1527 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1528 vec![(
1529 ProjectPath {
1530 worktree_id: main_worktree_id,
1531 path: Arc::from(Path::new("b.rs")),
1532 },
1533 server_id,
1534 DiagnosticSummary {
1535 error_count: 1,
1536 warning_count: 0,
1537 }
1538 )]
1539 );
1540 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1541 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1542 });
1543}
1544
// Exercises the disk-based-diagnostics progress lifecycle: starting progress
// on the adapter's disk-based token emits DiskBasedDiagnosticsStarted, the
// pushed diagnostics surface in the buffer, ending progress emits
// DiskBasedDiagnosticsFinished, and re-publishing empty diagnostics a second
// time produces no additional DiagnosticsUpdated event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter declares `progress_token` as its disk-based diagnostics
    // token, which is what makes start/end progress map onto the
    // DiskBasedDiagnostics{Started,Finished} events below.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Progress tokens are suffixed (`<token>/0`) but still match the
    // disk-based token prefix, triggering the Started event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for a.rs fires a DiagnosticsUpdated event even
    // though that buffer is not open yet.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening a.rs now should carry the previously-published diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Second empty publish is a no-op: the event stream stays pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1681
// Restarting a language server while its disk-based-diagnostics progress is
// still open must not wedge the project: the old server (id 0) is removed, a
// new one (id 1) is added, and once the *new* server's progress ends, no
// servers are reported as running disk-based diagnostics — even though the
// old server's progress token was never closed.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server id is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1772
// Restarting a language server must clear the diagnostics the previous
// instance published: after the restart both the in-buffer diagnostics and
// the project-level diagnostic summary are empty again.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is visible in the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1853
1854#[gpui::test]
1855async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1856 init_test(cx);
1857
1858 let fs = FakeFs::new(cx.executor());
1859 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1860
1861 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1862 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1863
1864 language_registry.add(rust_lang());
1865 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1866
1867 let (buffer, _handle) = project
1868 .update(cx, |project, cx| {
1869 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1870 })
1871 .await
1872 .unwrap();
1873
1874 // Before restarting the server, report diagnostics with an unknown buffer version.
1875 let fake_server = fake_servers.next().await.unwrap();
1876 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1877 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1878 version: Some(10000),
1879 diagnostics: Vec::new(),
1880 });
1881 cx.executor().run_until_parked();
1882 project.update(cx, |project, cx| {
1883 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
1884 });
1885
1886 let mut fake_server = fake_servers.next().await.unwrap();
1887 let notification = fake_server
1888 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1889 .await
1890 .text_document;
1891 assert_eq!(notification.version, 0);
1892}
1893
1894#[gpui::test]
1895async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1896 init_test(cx);
1897
1898 let progress_token = "the-progress-token";
1899
1900 let fs = FakeFs::new(cx.executor());
1901 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1902
1903 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1904
1905 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1906 language_registry.add(rust_lang());
1907 let mut fake_servers = language_registry.register_fake_lsp(
1908 "Rust",
1909 FakeLspAdapter {
1910 name: "the-language-server",
1911 disk_based_diagnostics_sources: vec!["disk".into()],
1912 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1913 ..Default::default()
1914 },
1915 );
1916
1917 let (buffer, _handle) = project
1918 .update(cx, |project, cx| {
1919 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1920 })
1921 .await
1922 .unwrap();
1923
1924 // Simulate diagnostics starting to update.
1925 let mut fake_server = fake_servers.next().await.unwrap();
1926 fake_server
1927 .start_progress_with(
1928 "another-token",
1929 lsp::WorkDoneProgressBegin {
1930 cancellable: Some(false),
1931 ..Default::default()
1932 },
1933 )
1934 .await;
1935 fake_server
1936 .start_progress_with(
1937 progress_token,
1938 lsp::WorkDoneProgressBegin {
1939 cancellable: Some(true),
1940 ..Default::default()
1941 },
1942 )
1943 .await;
1944 cx.executor().run_until_parked();
1945
1946 project.update(cx, |project, cx| {
1947 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1948 });
1949
1950 let cancel_notification = fake_server
1951 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1952 .await;
1953 assert_eq!(
1954 cancel_notification.token,
1955 NumberOrString::String(progress_token.into())
1956 );
1957}
1958
1959#[gpui::test]
1960async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1961 init_test(cx);
1962
1963 let fs = FakeFs::new(cx.executor());
1964 fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
1965 .await;
1966
1967 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1968 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1969
1970 let mut fake_rust_servers = language_registry.register_fake_lsp(
1971 "Rust",
1972 FakeLspAdapter {
1973 name: "rust-lsp",
1974 ..Default::default()
1975 },
1976 );
1977 let mut fake_js_servers = language_registry.register_fake_lsp(
1978 "JavaScript",
1979 FakeLspAdapter {
1980 name: "js-lsp",
1981 ..Default::default()
1982 },
1983 );
1984 language_registry.add(rust_lang());
1985 language_registry.add(js_lang());
1986
1987 let _rs_buffer = project
1988 .update(cx, |project, cx| {
1989 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1990 })
1991 .await
1992 .unwrap();
1993 let _js_buffer = project
1994 .update(cx, |project, cx| {
1995 project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
1996 })
1997 .await
1998 .unwrap();
1999
2000 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
2001 assert_eq!(
2002 fake_rust_server_1
2003 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2004 .await
2005 .text_document
2006 .uri
2007 .as_str(),
2008 uri!("file:///dir/a.rs")
2009 );
2010
2011 let mut fake_js_server = fake_js_servers.next().await.unwrap();
2012 assert_eq!(
2013 fake_js_server
2014 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2015 .await
2016 .text_document
2017 .uri
2018 .as_str(),
2019 uri!("file:///dir/b.js")
2020 );
2021
2022 // Disable Rust language server, ensuring only that server gets stopped.
2023 cx.update(|cx| {
2024 SettingsStore::update_global(cx, |settings, cx| {
2025 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
2026 settings.languages.0.insert(
2027 "Rust".into(),
2028 LanguageSettingsContent {
2029 enable_language_server: Some(false),
2030 ..Default::default()
2031 },
2032 );
2033 });
2034 })
2035 });
2036 fake_rust_server_1
2037 .receive_notification::<lsp::notification::Exit>()
2038 .await;
2039
2040 // Enable Rust and disable JavaScript language servers, ensuring that the
2041 // former gets started again and that the latter stops.
2042 cx.update(|cx| {
2043 SettingsStore::update_global(cx, |settings, cx| {
2044 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
2045 settings.languages.0.insert(
2046 LanguageName::new("Rust"),
2047 LanguageSettingsContent {
2048 enable_language_server: Some(true),
2049 ..Default::default()
2050 },
2051 );
2052 settings.languages.0.insert(
2053 LanguageName::new("JavaScript"),
2054 LanguageSettingsContent {
2055 enable_language_server: Some(false),
2056 ..Default::default()
2057 },
2058 );
2059 });
2060 })
2061 });
2062 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
2063 assert_eq!(
2064 fake_rust_server_2
2065 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2066 .await
2067 .text_document
2068 .uri
2069 .as_str(),
2070 uri!("file:///dir/a.rs")
2071 );
2072 fake_js_server
2073 .receive_notification::<lsp::notification::Exit>()
2074 .await;
2075}
2076
2077#[gpui::test(iterations = 3)]
2078async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
2079 init_test(cx);
2080
2081 let text = "
2082 fn a() { A }
2083 fn b() { BB }
2084 fn c() { CCC }
2085 "
2086 .unindent();
2087
2088 let fs = FakeFs::new(cx.executor());
2089 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2090
2091 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2092 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2093
2094 language_registry.add(rust_lang());
2095 let mut fake_servers = language_registry.register_fake_lsp(
2096 "Rust",
2097 FakeLspAdapter {
2098 disk_based_diagnostics_sources: vec!["disk".into()],
2099 ..Default::default()
2100 },
2101 );
2102
2103 let buffer = project
2104 .update(cx, |project, cx| {
2105 project.open_local_buffer(path!("/dir/a.rs"), cx)
2106 })
2107 .await
2108 .unwrap();
2109
2110 let _handle = project.update(cx, |project, cx| {
2111 project.register_buffer_with_language_servers(&buffer, cx)
2112 });
2113
2114 let mut fake_server = fake_servers.next().await.unwrap();
2115 let open_notification = fake_server
2116 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2117 .await;
2118
2119 // Edit the buffer, moving the content down
2120 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2121 let change_notification_1 = fake_server
2122 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2123 .await;
2124 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2125
2126 // Report some diagnostics for the initial version of the buffer
2127 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2128 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2129 version: Some(open_notification.text_document.version),
2130 diagnostics: vec![
2131 lsp::Diagnostic {
2132 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2133 severity: Some(DiagnosticSeverity::ERROR),
2134 message: "undefined variable 'A'".to_string(),
2135 source: Some("disk".to_string()),
2136 ..Default::default()
2137 },
2138 lsp::Diagnostic {
2139 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2140 severity: Some(DiagnosticSeverity::ERROR),
2141 message: "undefined variable 'BB'".to_string(),
2142 source: Some("disk".to_string()),
2143 ..Default::default()
2144 },
2145 lsp::Diagnostic {
2146 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2147 severity: Some(DiagnosticSeverity::ERROR),
2148 source: Some("disk".to_string()),
2149 message: "undefined variable 'CCC'".to_string(),
2150 ..Default::default()
2151 },
2152 ],
2153 });
2154
2155 // The diagnostics have moved down since they were created.
2156 cx.executor().run_until_parked();
2157 buffer.update(cx, |buffer, _| {
2158 assert_eq!(
2159 buffer
2160 .snapshot()
2161 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2162 .collect::<Vec<_>>(),
2163 &[
2164 DiagnosticEntry {
2165 range: Point::new(3, 9)..Point::new(3, 11),
2166 diagnostic: Diagnostic {
2167 source: Some("disk".into()),
2168 severity: DiagnosticSeverity::ERROR,
2169 message: "undefined variable 'BB'".to_string(),
2170 is_disk_based: true,
2171 group_id: 1,
2172 is_primary: true,
2173 source_kind: DiagnosticSourceKind::Pushed,
2174 ..Diagnostic::default()
2175 },
2176 },
2177 DiagnosticEntry {
2178 range: Point::new(4, 9)..Point::new(4, 12),
2179 diagnostic: Diagnostic {
2180 source: Some("disk".into()),
2181 severity: DiagnosticSeverity::ERROR,
2182 message: "undefined variable 'CCC'".to_string(),
2183 is_disk_based: true,
2184 group_id: 2,
2185 is_primary: true,
2186 source_kind: DiagnosticSourceKind::Pushed,
2187 ..Diagnostic::default()
2188 }
2189 }
2190 ]
2191 );
2192 assert_eq!(
2193 chunks_with_diagnostics(buffer, 0..buffer.len()),
2194 [
2195 ("\n\nfn a() { ".to_string(), None),
2196 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2197 (" }\nfn b() { ".to_string(), None),
2198 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2199 (" }\nfn c() { ".to_string(), None),
2200 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2201 (" }\n".to_string(), None),
2202 ]
2203 );
2204 assert_eq!(
2205 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2206 [
2207 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2208 (" }\nfn c() { ".to_string(), None),
2209 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2210 ]
2211 );
2212 });
2213
2214 // Ensure overlapping diagnostics are highlighted correctly.
2215 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2216 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2217 version: Some(open_notification.text_document.version),
2218 diagnostics: vec![
2219 lsp::Diagnostic {
2220 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2221 severity: Some(DiagnosticSeverity::ERROR),
2222 message: "undefined variable 'A'".to_string(),
2223 source: Some("disk".to_string()),
2224 ..Default::default()
2225 },
2226 lsp::Diagnostic {
2227 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2228 severity: Some(DiagnosticSeverity::WARNING),
2229 message: "unreachable statement".to_string(),
2230 source: Some("disk".to_string()),
2231 ..Default::default()
2232 },
2233 ],
2234 });
2235
2236 cx.executor().run_until_parked();
2237 buffer.update(cx, |buffer, _| {
2238 assert_eq!(
2239 buffer
2240 .snapshot()
2241 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2242 .collect::<Vec<_>>(),
2243 &[
2244 DiagnosticEntry {
2245 range: Point::new(2, 9)..Point::new(2, 12),
2246 diagnostic: Diagnostic {
2247 source: Some("disk".into()),
2248 severity: DiagnosticSeverity::WARNING,
2249 message: "unreachable statement".to_string(),
2250 is_disk_based: true,
2251 group_id: 4,
2252 is_primary: true,
2253 source_kind: DiagnosticSourceKind::Pushed,
2254 ..Diagnostic::default()
2255 }
2256 },
2257 DiagnosticEntry {
2258 range: Point::new(2, 9)..Point::new(2, 10),
2259 diagnostic: Diagnostic {
2260 source: Some("disk".into()),
2261 severity: DiagnosticSeverity::ERROR,
2262 message: "undefined variable 'A'".to_string(),
2263 is_disk_based: true,
2264 group_id: 3,
2265 is_primary: true,
2266 source_kind: DiagnosticSourceKind::Pushed,
2267 ..Diagnostic::default()
2268 },
2269 }
2270 ]
2271 );
2272 assert_eq!(
2273 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2274 [
2275 ("fn a() { ".to_string(), None),
2276 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2277 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2278 ("\n".to_string(), None),
2279 ]
2280 );
2281 assert_eq!(
2282 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2283 [
2284 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2285 ("\n".to_string(), None),
2286 ]
2287 );
2288 });
2289
2290 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2291 // changes since the last save.
2292 buffer.update(cx, |buffer, cx| {
2293 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2294 buffer.edit(
2295 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2296 None,
2297 cx,
2298 );
2299 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2300 });
2301 let change_notification_2 = fake_server
2302 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2303 .await;
2304 assert!(
2305 change_notification_2.text_document.version > change_notification_1.text_document.version
2306 );
2307
2308 // Handle out-of-order diagnostics
2309 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2310 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2311 version: Some(change_notification_2.text_document.version),
2312 diagnostics: vec![
2313 lsp::Diagnostic {
2314 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2315 severity: Some(DiagnosticSeverity::ERROR),
2316 message: "undefined variable 'BB'".to_string(),
2317 source: Some("disk".to_string()),
2318 ..Default::default()
2319 },
2320 lsp::Diagnostic {
2321 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2322 severity: Some(DiagnosticSeverity::WARNING),
2323 message: "undefined variable 'A'".to_string(),
2324 source: Some("disk".to_string()),
2325 ..Default::default()
2326 },
2327 ],
2328 });
2329
2330 cx.executor().run_until_parked();
2331 buffer.update(cx, |buffer, _| {
2332 assert_eq!(
2333 buffer
2334 .snapshot()
2335 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2336 .collect::<Vec<_>>(),
2337 &[
2338 DiagnosticEntry {
2339 range: Point::new(2, 21)..Point::new(2, 22),
2340 diagnostic: Diagnostic {
2341 source: Some("disk".into()),
2342 severity: DiagnosticSeverity::WARNING,
2343 message: "undefined variable 'A'".to_string(),
2344 is_disk_based: true,
2345 group_id: 6,
2346 is_primary: true,
2347 source_kind: DiagnosticSourceKind::Pushed,
2348 ..Diagnostic::default()
2349 }
2350 },
2351 DiagnosticEntry {
2352 range: Point::new(3, 9)..Point::new(3, 14),
2353 diagnostic: Diagnostic {
2354 source: Some("disk".into()),
2355 severity: DiagnosticSeverity::ERROR,
2356 message: "undefined variable 'BB'".to_string(),
2357 is_disk_based: true,
2358 group_id: 5,
2359 is_primary: true,
2360 source_kind: DiagnosticSourceKind::Pushed,
2361 ..Diagnostic::default()
2362 },
2363 }
2364 ]
2365 );
2366 });
2367}
2368
2369#[gpui::test]
2370async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2371 init_test(cx);
2372
2373 let text = concat!(
2374 "let one = ;\n", //
2375 "let two = \n",
2376 "let three = 3;\n",
2377 );
2378
2379 let fs = FakeFs::new(cx.executor());
2380 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2381
2382 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2383 let buffer = project
2384 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2385 .await
2386 .unwrap();
2387
2388 project.update(cx, |project, cx| {
2389 project.lsp_store.update(cx, |lsp_store, cx| {
2390 lsp_store
2391 .update_diagnostic_entries(
2392 LanguageServerId(0),
2393 PathBuf::from("/dir/a.rs"),
2394 None,
2395 None,
2396 vec![
2397 DiagnosticEntry {
2398 range: Unclipped(PointUtf16::new(0, 10))
2399 ..Unclipped(PointUtf16::new(0, 10)),
2400 diagnostic: Diagnostic {
2401 severity: DiagnosticSeverity::ERROR,
2402 message: "syntax error 1".to_string(),
2403 source_kind: DiagnosticSourceKind::Pushed,
2404 ..Diagnostic::default()
2405 },
2406 },
2407 DiagnosticEntry {
2408 range: Unclipped(PointUtf16::new(1, 10))
2409 ..Unclipped(PointUtf16::new(1, 10)),
2410 diagnostic: Diagnostic {
2411 severity: DiagnosticSeverity::ERROR,
2412 message: "syntax error 2".to_string(),
2413 source_kind: DiagnosticSourceKind::Pushed,
2414 ..Diagnostic::default()
2415 },
2416 },
2417 ],
2418 cx,
2419 )
2420 .unwrap();
2421 })
2422 });
2423
2424 // An empty range is extended forward to include the following character.
2425 // At the end of a line, an empty range is extended backward to include
2426 // the preceding character.
2427 buffer.update(cx, |buffer, _| {
2428 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2429 assert_eq!(
2430 chunks
2431 .iter()
2432 .map(|(s, d)| (s.as_str(), *d))
2433 .collect::<Vec<_>>(),
2434 &[
2435 ("let one = ", None),
2436 (";", Some(DiagnosticSeverity::ERROR)),
2437 ("\nlet two =", None),
2438 (" ", Some(DiagnosticSeverity::ERROR)),
2439 ("\nlet three = 3;\n", None)
2440 ]
2441 );
2442 });
2443}
2444
2445#[gpui::test]
2446async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2447 init_test(cx);
2448
2449 let fs = FakeFs::new(cx.executor());
2450 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2451 .await;
2452
2453 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2454 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2455
2456 lsp_store.update(cx, |lsp_store, cx| {
2457 lsp_store
2458 .update_diagnostic_entries(
2459 LanguageServerId(0),
2460 Path::new("/dir/a.rs").to_owned(),
2461 None,
2462 None,
2463 vec![DiagnosticEntry {
2464 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2465 diagnostic: Diagnostic {
2466 severity: DiagnosticSeverity::ERROR,
2467 is_primary: true,
2468 message: "syntax error a1".to_string(),
2469 source_kind: DiagnosticSourceKind::Pushed,
2470 ..Diagnostic::default()
2471 },
2472 }],
2473 cx,
2474 )
2475 .unwrap();
2476 lsp_store
2477 .update_diagnostic_entries(
2478 LanguageServerId(1),
2479 Path::new("/dir/a.rs").to_owned(),
2480 None,
2481 None,
2482 vec![DiagnosticEntry {
2483 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2484 diagnostic: Diagnostic {
2485 severity: DiagnosticSeverity::ERROR,
2486 is_primary: true,
2487 message: "syntax error b1".to_string(),
2488 source_kind: DiagnosticSourceKind::Pushed,
2489 ..Diagnostic::default()
2490 },
2491 }],
2492 cx,
2493 )
2494 .unwrap();
2495
2496 assert_eq!(
2497 lsp_store.diagnostic_summary(false, cx),
2498 DiagnosticSummary {
2499 error_count: 2,
2500 warning_count: 0,
2501 }
2502 );
2503 });
2504}
2505
// Verifies that `edits_from_lsp` maps edits a language server computed
// against a *stale* document version onto the current buffer: the user keeps
// typing after the server snapshots the document, so the returned ranges must
// be translated through the intervening changes before being applied.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the LSP edits
    // below are expressed against this (soon-to-be-stale) snapshot.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // All ranges below are in coordinates of the *original* document version.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                // The stale version tells `edits_from_lsp` which snapshot the
                // ranges refer to, so they can be mapped forward.
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the mapped edits must preserve the user's interleaved edits
    // while landing the server's changes in the right places.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2660
// Verifies that `edits_from_lsp` minimizes a large LSP diff down to the
// actual textual changes: the server rewrites most of the file, but only two
// small buffer edits should result.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The huge diff should have been reduced to just these two edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2771
2772#[gpui::test]
2773async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
2774 cx: &mut gpui::TestAppContext,
2775) {
2776 init_test(cx);
2777
2778 let text = "Path()";
2779
2780 let fs = FakeFs::new(cx.executor());
2781 fs.insert_tree(
2782 path!("/dir"),
2783 json!({
2784 "a.rs": text
2785 }),
2786 )
2787 .await;
2788
2789 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2790 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2791 let buffer = project
2792 .update(cx, |project, cx| {
2793 project.open_local_buffer(path!("/dir/a.rs"), cx)
2794 })
2795 .await
2796 .unwrap();
2797
2798 // Simulate the language server sending us a pair of edits at the same location,
2799 // with an insertion following a replacement (which violates the LSP spec).
2800 let edits = lsp_store
2801 .update(cx, |lsp_store, cx| {
2802 lsp_store.as_local_mut().unwrap().edits_from_lsp(
2803 &buffer,
2804 [
2805 lsp::TextEdit {
2806 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
2807 new_text: "Path".into(),
2808 },
2809 lsp::TextEdit {
2810 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2811 new_text: "from path import Path\n\n\n".into(),
2812 },
2813 ],
2814 LanguageServerId(0),
2815 None,
2816 cx,
2817 )
2818 })
2819 .await
2820 .unwrap();
2821
2822 buffer.update(cx, |buffer, cx| {
2823 buffer.edit(edits, None, cx);
2824 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
2825 });
2826}
2827
// Verifies that `edits_from_lsp` tolerates malformed server output:
// out-of-order edits, an inverted range, and a range extending past the end
// of the file are all normalized instead of being rejected.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range runs past the end of the file (line 99).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, only two normalized edits remain.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2934
2935fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2936 buffer: &Buffer,
2937 range: Range<T>,
2938) -> Vec<(String, Option<DiagnosticSeverity>)> {
2939 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2940 for chunk in buffer.snapshot().chunks(range, true) {
2941 if chunks.last().map_or(false, |prev_chunk| {
2942 prev_chunk.1 == chunk.diagnostic_severity
2943 }) {
2944 chunks.last_mut().unwrap().0.push_str(chunk.text);
2945 } else {
2946 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2947 }
2948 }
2949 chunks
2950}
2951
// Verifies go-to-definition targeting a file outside the project's worktree:
// the target file is loaded into a new, *invisible* worktree, which is
// released once the last reference to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is opened as the project root; `a.rs` is outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server resolves the definition to a location inside `a.rs`.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // `a.rs` was added as an invisible (`false`) worktree to host the
        // target buffer, alongside the visible `b.rs` worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition released the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3049
// Verifies that when a completion item carries a `text_edit`, its range and
// new text take precedence over both `insert_text` and the item's `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server responds with an item whose `text_edit` replaces the
    // trailing "fqn" (last 3 characters).
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The edit's text and range win over `insert_text` and `label`.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3132
// Verifies completion resolution against a `CompletionList` that supplies a
// default `edit_range` via `item_defaults`: items without their own
// `text_edit` use that default range, and the replacement text falls back
// from `insert_text` to the item's `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covers the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `insert_text` is applied over the default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no `insert_text`, the `label` is used as the replacement text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3268
// Verifies completion fallback when the server provides neither a
// `text_edit` nor a default edit range: the replacement range is derived
// from the word adjacent to the cursor, and the text falls back from
// `insert_text` to the item's `label`.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // `insert_text` replaces the word before the cursor ("fqn").
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The `label` replaces the word before the cursor ("cmp", inside quotes).
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3374
// Verifies that carriage returns ("\r" and "\r\n") inside an LSP completion's
// `insert_text` are normalized to plain "\n" in the resulting completion's
// `new_text`, so that a server's line endings never leak into the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request before installing the handler; the fake
    // server queues the request until the handler consumes it below.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mixed "\r" and "\r\n" endings — both must be normalized.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3442
// End-to-end test of a code action that resolves to a *command* instead of
// edits: applying the action executes the command on the server, the server
// responds with an `workspace/applyEdit` request back to the client, and the
// edits from that request become the project transaction (and are undoable).
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The fake server advertises both code-action resolution and the command
    // that the resolved action will reference.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3584
3585#[gpui::test(iterations = 10)]
3586async fn test_save_file(cx: &mut gpui::TestAppContext) {
3587 init_test(cx);
3588
3589 let fs = FakeFs::new(cx.executor());
3590 fs.insert_tree(
3591 path!("/dir"),
3592 json!({
3593 "file1": "the old contents",
3594 }),
3595 )
3596 .await;
3597
3598 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3599 let buffer = project
3600 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3601 .await
3602 .unwrap();
3603 buffer.update(cx, |buffer, cx| {
3604 assert_eq!(buffer.text(), "the old contents");
3605 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3606 });
3607
3608 project
3609 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3610 .await
3611 .unwrap();
3612
3613 let new_text = fs
3614 .load(Path::new(path!("/dir/file1")))
3615 .await
3616 .unwrap()
3617 .replace("\r\n", "\n");
3618 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3619}
3620
// Verifies that saving an untitled buffer to a path whose extension maps to a
// language causes the matching language server to start and to be notified of
// the newly-saved file.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; at this point no language is associated, so
    // no language server should be running for it.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving to a ".rs" path gives the buffer the Rust language.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: Arc::from("file.rs".as_ref()),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer now has a language server attached.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
3700
3701#[gpui::test(iterations = 30)]
3702async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3703 init_test(cx);
3704
3705 let fs = FakeFs::new(cx.executor().clone());
3706 fs.insert_tree(
3707 path!("/dir"),
3708 json!({
3709 "file1": "the original contents",
3710 }),
3711 )
3712 .await;
3713
3714 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3715 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3716 let buffer = project
3717 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3718 .await
3719 .unwrap();
3720
3721 // Simulate buffer diffs being slow, so that they don't complete before
3722 // the next file change occurs.
3723 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3724
3725 // Change the buffer's file on disk, and then wait for the file change
3726 // to be detected by the worktree, so that the buffer starts reloading.
3727 fs.save(
3728 path!("/dir/file1").as_ref(),
3729 &"the first contents".into(),
3730 Default::default(),
3731 )
3732 .await
3733 .unwrap();
3734 worktree.next_event(cx).await;
3735
3736 // Change the buffer's file again. Depending on the random seed, the
3737 // previous file change may still be in progress.
3738 fs.save(
3739 path!("/dir/file1").as_ref(),
3740 &"the second contents".into(),
3741 Default::default(),
3742 )
3743 .await
3744 .unwrap();
3745 worktree.next_event(cx).await;
3746
3747 cx.executor().run_until_parked();
3748 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3749 buffer.read_with(cx, |buffer, _| {
3750 assert_eq!(buffer.text(), on_disk_text);
3751 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3752 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3753 });
3754}
3755
3756#[gpui::test(iterations = 30)]
3757async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3758 init_test(cx);
3759
3760 let fs = FakeFs::new(cx.executor().clone());
3761 fs.insert_tree(
3762 path!("/dir"),
3763 json!({
3764 "file1": "the original contents",
3765 }),
3766 )
3767 .await;
3768
3769 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3770 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3771 let buffer = project
3772 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3773 .await
3774 .unwrap();
3775
3776 // Simulate buffer diffs being slow, so that they don't complete before
3777 // the next file change occurs.
3778 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3779
3780 // Change the buffer's file on disk, and then wait for the file change
3781 // to be detected by the worktree, so that the buffer starts reloading.
3782 fs.save(
3783 path!("/dir/file1").as_ref(),
3784 &"the first contents".into(),
3785 Default::default(),
3786 )
3787 .await
3788 .unwrap();
3789 worktree.next_event(cx).await;
3790
3791 cx.executor()
3792 .spawn(cx.executor().simulate_random_delay())
3793 .await;
3794
3795 // Perform a noop edit, causing the buffer's version to increase.
3796 buffer.update(cx, |buffer, cx| {
3797 buffer.edit([(0..0, " ")], None, cx);
3798 buffer.undo(cx);
3799 });
3800
3801 cx.executor().run_until_parked();
3802 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3803 buffer.read_with(cx, |buffer, _| {
3804 let buffer_text = buffer.text();
3805 if buffer_text == on_disk_text {
3806 assert!(
3807 !buffer.is_dirty() && !buffer.has_conflict(),
3808 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3809 );
3810 }
3811 // If the file change occurred while the buffer was processing the first
3812 // change, the buffer will be in a conflicting state.
3813 else {
3814 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3815 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3816 }
3817 });
3818}
3819
3820#[gpui::test]
3821async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3822 init_test(cx);
3823
3824 let fs = FakeFs::new(cx.executor());
3825 fs.insert_tree(
3826 path!("/dir"),
3827 json!({
3828 "file1": "the old contents",
3829 }),
3830 )
3831 .await;
3832
3833 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3834 let buffer = project
3835 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3836 .await
3837 .unwrap();
3838 buffer.update(cx, |buffer, cx| {
3839 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3840 });
3841
3842 project
3843 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3844 .await
3845 .unwrap();
3846
3847 let new_text = fs
3848 .load(Path::new(path!("/dir/file1")))
3849 .await
3850 .unwrap()
3851 .replace("\r\n", "\n");
3852 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3853}
3854
3855#[gpui::test]
3856async fn test_save_as(cx: &mut gpui::TestAppContext) {
3857 init_test(cx);
3858
3859 let fs = FakeFs::new(cx.executor());
3860 fs.insert_tree("/dir", json!({})).await;
3861
3862 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3863
3864 let languages = project.update(cx, |project, _| project.languages().clone());
3865 languages.add(rust_lang());
3866
3867 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3868 buffer.update(cx, |buffer, cx| {
3869 buffer.edit([(0..0, "abc")], None, cx);
3870 assert!(buffer.is_dirty());
3871 assert!(!buffer.has_conflict());
3872 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3873 });
3874 project
3875 .update(cx, |project, cx| {
3876 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3877 let path = ProjectPath {
3878 worktree_id,
3879 path: Arc::from(Path::new("file1.rs")),
3880 };
3881 project.save_buffer_as(buffer.clone(), path, cx)
3882 })
3883 .await
3884 .unwrap();
3885 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3886
3887 cx.executor().run_until_parked();
3888 buffer.update(cx, |buffer, cx| {
3889 assert_eq!(
3890 buffer.file().unwrap().full_path(cx),
3891 Path::new("dir/file1.rs")
3892 );
3893 assert!(!buffer.is_dirty());
3894 assert!(!buffer.has_conflict());
3895 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3896 });
3897
3898 let opened_buffer = project
3899 .update(cx, |project, cx| {
3900 project.open_local_buffer("/dir/file1.rs", cx)
3901 })
3902 .await
3903 .unwrap();
3904 assert_eq!(opened_buffer, buffer);
3905}
3906
// Exercises real filesystem rescans: after renaming/deleting files and
// directories on disk, open buffers must track their files' new paths, entry
// ids must remain stable across renames, and a remote replica of the worktree
// fed with the observed updates must converge to the same set of paths.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    // Uses the real filesystem (not FakeFs) so actual fs events are exercised.
    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Returns the stable worktree entry id for a worktree-relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so we can replay them into
    // the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects all renames/deletions after the rescan.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });

    // Entry ids are preserved across renames (same file, new path).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // file5 was deleted, so its buffer keeps the old path...
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        // ...but its disk state reflects the deletion.
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });
}
4072
4073#[gpui::test(iterations = 10)]
4074async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4075 init_test(cx);
4076
4077 let fs = FakeFs::new(cx.executor());
4078 fs.insert_tree(
4079 path!("/dir"),
4080 json!({
4081 "a": {
4082 "file1": "",
4083 }
4084 }),
4085 )
4086 .await;
4087
4088 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4089 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4090 let tree_id = tree.update(cx, |tree, _| tree.id());
4091
4092 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4093 project.update(cx, |project, cx| {
4094 let tree = project.worktrees(cx).next().unwrap();
4095 tree.read(cx)
4096 .entry_for_path(path)
4097 .unwrap_or_else(|| panic!("no entry for path {}", path))
4098 .id
4099 })
4100 };
4101
4102 let dir_id = id_for_path("a", cx);
4103 let file_id = id_for_path("a/file1", cx);
4104 let buffer = project
4105 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
4106 .await
4107 .unwrap();
4108 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4109
4110 project
4111 .update(cx, |project, cx| {
4112 project.rename_entry(dir_id, Path::new("b"), cx)
4113 })
4114 .unwrap()
4115 .await
4116 .to_included()
4117 .unwrap();
4118 cx.executor().run_until_parked();
4119
4120 assert_eq!(id_for_path("b", cx), dir_id);
4121 assert_eq!(id_for_path("b/file1", cx), file_id);
4122 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4123}
4124
4125#[gpui::test]
4126async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4127 init_test(cx);
4128
4129 let fs = FakeFs::new(cx.executor());
4130 fs.insert_tree(
4131 "/dir",
4132 json!({
4133 "a.txt": "a-contents",
4134 "b.txt": "b-contents",
4135 }),
4136 )
4137 .await;
4138
4139 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4140
4141 // Spawn multiple tasks to open paths, repeating some paths.
4142 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4143 (
4144 p.open_local_buffer("/dir/a.txt", cx),
4145 p.open_local_buffer("/dir/b.txt", cx),
4146 p.open_local_buffer("/dir/a.txt", cx),
4147 )
4148 });
4149
4150 let buffer_a_1 = buffer_a_1.await.unwrap();
4151 let buffer_a_2 = buffer_a_2.await.unwrap();
4152 let buffer_b = buffer_b.await.unwrap();
4153 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4154 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4155
4156 // There is only one buffer per path.
4157 let buffer_a_id = buffer_a_1.entity_id();
4158 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4159
4160 // Open the same path again while it is still open.
4161 drop(buffer_a_1);
4162 let buffer_a_3 = project
4163 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4164 .await
4165 .unwrap();
4166
4167 // There's still only one buffer per path.
4168 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4169}
4170
// Exhaustively checks the buffer dirty/conflict state machine and the exact
// event sequences it emits: editing, saving, undo-back-to-saved-state, and the
// interaction between dirtiness and on-disk file deletion.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Accumulates every non-Operation event the buffer emits, so the exact
    // event sequences can be asserted below.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first post-save edit produces DirtyChanged; the
        // second edit leaves dirtiness unchanged.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
4352
// Verifies buffer behavior when its file changes on disk: a clean buffer is
// reloaded via a diff (preserving anchor positions across the edit), while a
// dirty buffer keeps its contents and is flagged as conflicted instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // "ˇ" markers give offsets at which anchors will be placed.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Anchors must have moved to the corresponding positions in the new
        // text, since the reload was applied as a diff rather than a reset.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4435
4436#[gpui::test]
4437async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4438 init_test(cx);
4439
4440 let fs = FakeFs::new(cx.executor());
4441 fs.insert_tree(
4442 path!("/dir"),
4443 json!({
4444 "file1": "a\nb\nc\n",
4445 "file2": "one\r\ntwo\r\nthree\r\n",
4446 }),
4447 )
4448 .await;
4449
4450 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4451 let buffer1 = project
4452 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4453 .await
4454 .unwrap();
4455 let buffer2 = project
4456 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4457 .await
4458 .unwrap();
4459
4460 buffer1.update(cx, |buffer, _| {
4461 assert_eq!(buffer.text(), "a\nb\nc\n");
4462 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4463 });
4464 buffer2.update(cx, |buffer, _| {
4465 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4466 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4467 });
4468
4469 // Change a file's line endings on disk from unix to windows. The buffer's
4470 // state updates correctly.
4471 fs.save(
4472 path!("/dir/file1").as_ref(),
4473 &"aaa\nb\nc\n".into(),
4474 LineEnding::Windows,
4475 )
4476 .await
4477 .unwrap();
4478 cx.executor().run_until_parked();
4479 buffer1.update(cx, |buffer, _| {
4480 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4481 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4482 });
4483
4484 // Save a file with windows line endings. The file is written correctly.
4485 buffer2.update(cx, |buffer, cx| {
4486 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4487 });
4488 project
4489 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4490 .await
4491 .unwrap();
4492 assert_eq!(
4493 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4494 "one\r\ntwo\r\nthree\r\nfour\r\n",
4495 );
4496}
4497
4498#[gpui::test]
4499async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4500 init_test(cx);
4501
4502 let fs = FakeFs::new(cx.executor());
4503 fs.insert_tree(
4504 path!("/dir"),
4505 json!({
4506 "a.rs": "
4507 fn foo(mut v: Vec<usize>) {
4508 for x in &v {
4509 v.push(1);
4510 }
4511 }
4512 "
4513 .unindent(),
4514 }),
4515 )
4516 .await;
4517
4518 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4519 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
4520 let buffer = project
4521 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
4522 .await
4523 .unwrap();
4524
4525 let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
4526 let message = lsp::PublishDiagnosticsParams {
4527 uri: buffer_uri.clone(),
4528 diagnostics: vec![
4529 lsp::Diagnostic {
4530 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4531 severity: Some(DiagnosticSeverity::WARNING),
4532 message: "error 1".to_string(),
4533 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4534 location: lsp::Location {
4535 uri: buffer_uri.clone(),
4536 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4537 },
4538 message: "error 1 hint 1".to_string(),
4539 }]),
4540 ..Default::default()
4541 },
4542 lsp::Diagnostic {
4543 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4544 severity: Some(DiagnosticSeverity::HINT),
4545 message: "error 1 hint 1".to_string(),
4546 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4547 location: lsp::Location {
4548 uri: buffer_uri.clone(),
4549 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4550 },
4551 message: "original diagnostic".to_string(),
4552 }]),
4553 ..Default::default()
4554 },
4555 lsp::Diagnostic {
4556 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4557 severity: Some(DiagnosticSeverity::ERROR),
4558 message: "error 2".to_string(),
4559 related_information: Some(vec![
4560 lsp::DiagnosticRelatedInformation {
4561 location: lsp::Location {
4562 uri: buffer_uri.clone(),
4563 range: lsp::Range::new(
4564 lsp::Position::new(1, 13),
4565 lsp::Position::new(1, 15),
4566 ),
4567 },
4568 message: "error 2 hint 1".to_string(),
4569 },
4570 lsp::DiagnosticRelatedInformation {
4571 location: lsp::Location {
4572 uri: buffer_uri.clone(),
4573 range: lsp::Range::new(
4574 lsp::Position::new(1, 13),
4575 lsp::Position::new(1, 15),
4576 ),
4577 },
4578 message: "error 2 hint 2".to_string(),
4579 },
4580 ]),
4581 ..Default::default()
4582 },
4583 lsp::Diagnostic {
4584 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4585 severity: Some(DiagnosticSeverity::HINT),
4586 message: "error 2 hint 1".to_string(),
4587 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4588 location: lsp::Location {
4589 uri: buffer_uri.clone(),
4590 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4591 },
4592 message: "original diagnostic".to_string(),
4593 }]),
4594 ..Default::default()
4595 },
4596 lsp::Diagnostic {
4597 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4598 severity: Some(DiagnosticSeverity::HINT),
4599 message: "error 2 hint 2".to_string(),
4600 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4601 location: lsp::Location {
4602 uri: buffer_uri,
4603 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4604 },
4605 message: "original diagnostic".to_string(),
4606 }]),
4607 ..Default::default()
4608 },
4609 ],
4610 version: None,
4611 };
4612
4613 lsp_store
4614 .update(cx, |lsp_store, cx| {
4615 lsp_store.update_diagnostics(
4616 LanguageServerId(0),
4617 message,
4618 None,
4619 DiagnosticSourceKind::Pushed,
4620 &[],
4621 cx,
4622 )
4623 })
4624 .unwrap();
4625 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
4626
4627 assert_eq!(
4628 buffer
4629 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
4630 .collect::<Vec<_>>(),
4631 &[
4632 DiagnosticEntry {
4633 range: Point::new(1, 8)..Point::new(1, 9),
4634 diagnostic: Diagnostic {
4635 severity: DiagnosticSeverity::WARNING,
4636 message: "error 1".to_string(),
4637 group_id: 1,
4638 is_primary: true,
4639 source_kind: DiagnosticSourceKind::Pushed,
4640 ..Diagnostic::default()
4641 }
4642 },
4643 DiagnosticEntry {
4644 range: Point::new(1, 8)..Point::new(1, 9),
4645 diagnostic: Diagnostic {
4646 severity: DiagnosticSeverity::HINT,
4647 message: "error 1 hint 1".to_string(),
4648 group_id: 1,
4649 is_primary: false,
4650 source_kind: DiagnosticSourceKind::Pushed,
4651 ..Diagnostic::default()
4652 }
4653 },
4654 DiagnosticEntry {
4655 range: Point::new(1, 13)..Point::new(1, 15),
4656 diagnostic: Diagnostic {
4657 severity: DiagnosticSeverity::HINT,
4658 message: "error 2 hint 1".to_string(),
4659 group_id: 0,
4660 is_primary: false,
4661 source_kind: DiagnosticSourceKind::Pushed,
4662 ..Diagnostic::default()
4663 }
4664 },
4665 DiagnosticEntry {
4666 range: Point::new(1, 13)..Point::new(1, 15),
4667 diagnostic: Diagnostic {
4668 severity: DiagnosticSeverity::HINT,
4669 message: "error 2 hint 2".to_string(),
4670 group_id: 0,
4671 is_primary: false,
4672 source_kind: DiagnosticSourceKind::Pushed,
4673 ..Diagnostic::default()
4674 }
4675 },
4676 DiagnosticEntry {
4677 range: Point::new(2, 8)..Point::new(2, 17),
4678 diagnostic: Diagnostic {
4679 severity: DiagnosticSeverity::ERROR,
4680 message: "error 2".to_string(),
4681 group_id: 0,
4682 is_primary: true,
4683 source_kind: DiagnosticSourceKind::Pushed,
4684 ..Diagnostic::default()
4685 }
4686 }
4687 ]
4688 );
4689
4690 assert_eq!(
4691 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4692 &[
4693 DiagnosticEntry {
4694 range: Point::new(1, 13)..Point::new(1, 15),
4695 diagnostic: Diagnostic {
4696 severity: DiagnosticSeverity::HINT,
4697 message: "error 2 hint 1".to_string(),
4698 group_id: 0,
4699 is_primary: false,
4700 source_kind: DiagnosticSourceKind::Pushed,
4701 ..Diagnostic::default()
4702 }
4703 },
4704 DiagnosticEntry {
4705 range: Point::new(1, 13)..Point::new(1, 15),
4706 diagnostic: Diagnostic {
4707 severity: DiagnosticSeverity::HINT,
4708 message: "error 2 hint 2".to_string(),
4709 group_id: 0,
4710 is_primary: false,
4711 source_kind: DiagnosticSourceKind::Pushed,
4712 ..Diagnostic::default()
4713 }
4714 },
4715 DiagnosticEntry {
4716 range: Point::new(2, 8)..Point::new(2, 17),
4717 diagnostic: Diagnostic {
4718 severity: DiagnosticSeverity::ERROR,
4719 message: "error 2".to_string(),
4720 group_id: 0,
4721 is_primary: true,
4722 source_kind: DiagnosticSourceKind::Pushed,
4723 ..Diagnostic::default()
4724 }
4725 }
4726 ]
4727 );
4728
4729 assert_eq!(
4730 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4731 &[
4732 DiagnosticEntry {
4733 range: Point::new(1, 8)..Point::new(1, 9),
4734 diagnostic: Diagnostic {
4735 severity: DiagnosticSeverity::WARNING,
4736 message: "error 1".to_string(),
4737 group_id: 1,
4738 is_primary: true,
4739 source_kind: DiagnosticSourceKind::Pushed,
4740 ..Diagnostic::default()
4741 }
4742 },
4743 DiagnosticEntry {
4744 range: Point::new(1, 8)..Point::new(1, 9),
4745 diagnostic: Diagnostic {
4746 severity: DiagnosticSeverity::HINT,
4747 message: "error 1 hint 1".to_string(),
4748 group_id: 1,
4749 is_primary: false,
4750 source_kind: DiagnosticSourceKind::Pushed,
4751 ..Diagnostic::default()
4752 }
4753 },
4754 ]
4755 );
4756}
4757
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the LSP file-operation flow around a workspace rename:
    // when a server registers for `willRenameFiles` / `didRenameFiles`,
    // renaming an entry sends the will-rename request first, applies the
    // workspace edit the server returns, then sends the did-rename
    // notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters the fake server registers: `.rs` files, and all
    // folders, both on the `file` scheme.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    // Advertise both will-rename and did-rename support so the project sends
    // the request and the notification.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a buffer starts the language server for its language.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; `response` resolves once the rename (including the
    // LSP round-trip) completes.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server will answer the will-rename request with. The text
    // itself is arbitrary; the test only checks that this exact edit is what
    // the project receives and resolves.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the handler returned so we can assert on it at the end.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    // Answer the will-rename request, checking the old/new URIs the project
    // reports. `.next().await` consumes exactly one incoming request.
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server should receive the did-rename
    // notification with the same URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4886
4887#[gpui::test]
4888async fn test_rename(cx: &mut gpui::TestAppContext) {
4889 // hi
4890 init_test(cx);
4891
4892 let fs = FakeFs::new(cx.executor());
4893 fs.insert_tree(
4894 path!("/dir"),
4895 json!({
4896 "one.rs": "const ONE: usize = 1;",
4897 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4898 }),
4899 )
4900 .await;
4901
4902 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4903
4904 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4905 language_registry.add(rust_lang());
4906 let mut fake_servers = language_registry.register_fake_lsp(
4907 "Rust",
4908 FakeLspAdapter {
4909 capabilities: lsp::ServerCapabilities {
4910 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
4911 prepare_provider: Some(true),
4912 work_done_progress_options: Default::default(),
4913 })),
4914 ..Default::default()
4915 },
4916 ..Default::default()
4917 },
4918 );
4919
4920 let (buffer, _handle) = project
4921 .update(cx, |project, cx| {
4922 project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
4923 })
4924 .await
4925 .unwrap();
4926
4927 let fake_server = fake_servers.next().await.unwrap();
4928
4929 let response = project.update(cx, |project, cx| {
4930 project.prepare_rename(buffer.clone(), 7, cx)
4931 });
4932 fake_server
4933 .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
4934 assert_eq!(
4935 params.text_document.uri.as_str(),
4936 uri!("file:///dir/one.rs")
4937 );
4938 assert_eq!(params.position, lsp::Position::new(0, 7));
4939 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4940 lsp::Position::new(0, 6),
4941 lsp::Position::new(0, 9),
4942 ))))
4943 })
4944 .next()
4945 .await
4946 .unwrap();
4947 let response = response.await.unwrap();
4948 let PrepareRenameResponse::Success(range) = response else {
4949 panic!("{:?}", response);
4950 };
4951 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
4952 assert_eq!(range, 6..9);
4953
4954 let response = project.update(cx, |project, cx| {
4955 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
4956 });
4957 fake_server
4958 .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
4959 assert_eq!(
4960 params.text_document_position.text_document.uri.as_str(),
4961 uri!("file:///dir/one.rs")
4962 );
4963 assert_eq!(
4964 params.text_document_position.position,
4965 lsp::Position::new(0, 7)
4966 );
4967 assert_eq!(params.new_name, "THREE");
4968 Ok(Some(lsp::WorkspaceEdit {
4969 changes: Some(
4970 [
4971 (
4972 lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
4973 vec![lsp::TextEdit::new(
4974 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
4975 "THREE".to_string(),
4976 )],
4977 ),
4978 (
4979 lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
4980 vec![
4981 lsp::TextEdit::new(
4982 lsp::Range::new(
4983 lsp::Position::new(0, 24),
4984 lsp::Position::new(0, 27),
4985 ),
4986 "THREE".to_string(),
4987 ),
4988 lsp::TextEdit::new(
4989 lsp::Range::new(
4990 lsp::Position::new(0, 35),
4991 lsp::Position::new(0, 38),
4992 ),
4993 "THREE".to_string(),
4994 ),
4995 ],
4996 ),
4997 ]
4998 .into_iter()
4999 .collect(),
5000 ),
5001 ..Default::default()
5002 }))
5003 })
5004 .next()
5005 .await
5006 .unwrap();
5007 let mut transaction = response.await.unwrap().0;
5008 assert_eq!(transaction.len(), 2);
5009 assert_eq!(
5010 transaction
5011 .remove_entry(&buffer)
5012 .unwrap()
5013 .0
5014 .update(cx, |buffer, _| buffer.text()),
5015 "const THREE: usize = 1;"
5016 );
5017 assert_eq!(
5018 transaction
5019 .into_keys()
5020 .next()
5021 .unwrap()
5022 .update(cx, |buffer, _| buffer.text()),
5023 "const TWO: usize = one::THREE + one::THREE;"
5024 );
5025}
5026
5027#[gpui::test]
5028async fn test_search(cx: &mut gpui::TestAppContext) {
5029 init_test(cx);
5030
5031 let fs = FakeFs::new(cx.executor());
5032 fs.insert_tree(
5033 path!("/dir"),
5034 json!({
5035 "one.rs": "const ONE: usize = 1;",
5036 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5037 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5038 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5039 }),
5040 )
5041 .await;
5042 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5043 assert_eq!(
5044 search(
5045 &project,
5046 SearchQuery::text(
5047 "TWO",
5048 false,
5049 true,
5050 false,
5051 Default::default(),
5052 Default::default(),
5053 false,
5054 None
5055 )
5056 .unwrap(),
5057 cx
5058 )
5059 .await
5060 .unwrap(),
5061 HashMap::from_iter([
5062 (path!("dir/two.rs").to_string(), vec![6..9]),
5063 (path!("dir/three.rs").to_string(), vec![37..40])
5064 ])
5065 );
5066
5067 let buffer_4 = project
5068 .update(cx, |project, cx| {
5069 project.open_local_buffer(path!("/dir/four.rs"), cx)
5070 })
5071 .await
5072 .unwrap();
5073 buffer_4.update(cx, |buffer, cx| {
5074 let text = "two::TWO";
5075 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5076 });
5077
5078 assert_eq!(
5079 search(
5080 &project,
5081 SearchQuery::text(
5082 "TWO",
5083 false,
5084 true,
5085 false,
5086 Default::default(),
5087 Default::default(),
5088 false,
5089 None,
5090 )
5091 .unwrap(),
5092 cx
5093 )
5094 .await
5095 .unwrap(),
5096 HashMap::from_iter([
5097 (path!("dir/two.rs").to_string(), vec![6..9]),
5098 (path!("dir/three.rs").to_string(), vec![37..40]),
5099 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5100 ])
5101 );
5102}
5103
5104#[gpui::test]
5105async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
5106 init_test(cx);
5107
5108 let search_query = "file";
5109
5110 let fs = FakeFs::new(cx.executor());
5111 fs.insert_tree(
5112 path!("/dir"),
5113 json!({
5114 "one.rs": r#"// Rust file one"#,
5115 "one.ts": r#"// TypeScript file one"#,
5116 "two.rs": r#"// Rust file two"#,
5117 "two.ts": r#"// TypeScript file two"#,
5118 }),
5119 )
5120 .await;
5121 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5122
5123 assert!(
5124 search(
5125 &project,
5126 SearchQuery::text(
5127 search_query,
5128 false,
5129 true,
5130 false,
5131 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5132 Default::default(),
5133 false,
5134 None
5135 )
5136 .unwrap(),
5137 cx
5138 )
5139 .await
5140 .unwrap()
5141 .is_empty(),
5142 "If no inclusions match, no files should be returned"
5143 );
5144
5145 assert_eq!(
5146 search(
5147 &project,
5148 SearchQuery::text(
5149 search_query,
5150 false,
5151 true,
5152 false,
5153 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5154 Default::default(),
5155 false,
5156 None
5157 )
5158 .unwrap(),
5159 cx
5160 )
5161 .await
5162 .unwrap(),
5163 HashMap::from_iter([
5164 (path!("dir/one.rs").to_string(), vec![8..12]),
5165 (path!("dir/two.rs").to_string(), vec![8..12]),
5166 ]),
5167 "Rust only search should give only Rust files"
5168 );
5169
5170 assert_eq!(
5171 search(
5172 &project,
5173 SearchQuery::text(
5174 search_query,
5175 false,
5176 true,
5177 false,
5178 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5179 Default::default(),
5180 false,
5181 None,
5182 )
5183 .unwrap(),
5184 cx
5185 )
5186 .await
5187 .unwrap(),
5188 HashMap::from_iter([
5189 (path!("dir/one.ts").to_string(), vec![14..18]),
5190 (path!("dir/two.ts").to_string(), vec![14..18]),
5191 ]),
5192 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
5193 );
5194
5195 assert_eq!(
5196 search(
5197 &project,
5198 SearchQuery::text(
5199 search_query,
5200 false,
5201 true,
5202 false,
5203 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5204 .unwrap(),
5205 Default::default(),
5206 false,
5207 None,
5208 )
5209 .unwrap(),
5210 cx
5211 )
5212 .await
5213 .unwrap(),
5214 HashMap::from_iter([
5215 (path!("dir/two.ts").to_string(), vec![14..18]),
5216 (path!("dir/one.rs").to_string(), vec![8..12]),
5217 (path!("dir/one.ts").to_string(), vec![14..18]),
5218 (path!("dir/two.rs").to_string(), vec![8..12]),
5219 ]),
5220 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
5221 );
5222}
5223
5224#[gpui::test]
5225async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5226 init_test(cx);
5227
5228 let search_query = "file";
5229
5230 let fs = FakeFs::new(cx.executor());
5231 fs.insert_tree(
5232 path!("/dir"),
5233 json!({
5234 "one.rs": r#"// Rust file one"#,
5235 "one.ts": r#"// TypeScript file one"#,
5236 "two.rs": r#"// Rust file two"#,
5237 "two.ts": r#"// TypeScript file two"#,
5238 }),
5239 )
5240 .await;
5241 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5242
5243 assert_eq!(
5244 search(
5245 &project,
5246 SearchQuery::text(
5247 search_query,
5248 false,
5249 true,
5250 false,
5251 Default::default(),
5252 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5253 false,
5254 None,
5255 )
5256 .unwrap(),
5257 cx
5258 )
5259 .await
5260 .unwrap(),
5261 HashMap::from_iter([
5262 (path!("dir/one.rs").to_string(), vec![8..12]),
5263 (path!("dir/one.ts").to_string(), vec![14..18]),
5264 (path!("dir/two.rs").to_string(), vec![8..12]),
5265 (path!("dir/two.ts").to_string(), vec![14..18]),
5266 ]),
5267 "If no exclusions match, all files should be returned"
5268 );
5269
5270 assert_eq!(
5271 search(
5272 &project,
5273 SearchQuery::text(
5274 search_query,
5275 false,
5276 true,
5277 false,
5278 Default::default(),
5279 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5280 false,
5281 None,
5282 )
5283 .unwrap(),
5284 cx
5285 )
5286 .await
5287 .unwrap(),
5288 HashMap::from_iter([
5289 (path!("dir/one.ts").to_string(), vec![14..18]),
5290 (path!("dir/two.ts").to_string(), vec![14..18]),
5291 ]),
5292 "Rust exclusion search should give only TypeScript files"
5293 );
5294
5295 assert_eq!(
5296 search(
5297 &project,
5298 SearchQuery::text(
5299 search_query,
5300 false,
5301 true,
5302 false,
5303 Default::default(),
5304 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5305 false,
5306 None,
5307 )
5308 .unwrap(),
5309 cx
5310 )
5311 .await
5312 .unwrap(),
5313 HashMap::from_iter([
5314 (path!("dir/one.rs").to_string(), vec![8..12]),
5315 (path!("dir/two.rs").to_string(), vec![8..12]),
5316 ]),
5317 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5318 );
5319
5320 assert!(
5321 search(
5322 &project,
5323 SearchQuery::text(
5324 search_query,
5325 false,
5326 true,
5327 false,
5328 Default::default(),
5329 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5330 .unwrap(),
5331 false,
5332 None,
5333 )
5334 .unwrap(),
5335 cx
5336 )
5337 .await
5338 .unwrap()
5339 .is_empty(),
5340 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5341 );
5342}
5343
5344#[gpui::test]
5345async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5346 init_test(cx);
5347
5348 let search_query = "file";
5349
5350 let fs = FakeFs::new(cx.executor());
5351 fs.insert_tree(
5352 path!("/dir"),
5353 json!({
5354 "one.rs": r#"// Rust file one"#,
5355 "one.ts": r#"// TypeScript file one"#,
5356 "two.rs": r#"// Rust file two"#,
5357 "two.ts": r#"// TypeScript file two"#,
5358 }),
5359 )
5360 .await;
5361
5362 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5363 let _buffer = project.update(cx, |project, cx| {
5364 let buffer = project.create_local_buffer("file", None, cx);
5365 project.mark_buffer_as_non_searchable(buffer.read(cx).remote_id(), cx);
5366 buffer
5367 });
5368
5369 assert_eq!(
5370 search(
5371 &project,
5372 SearchQuery::text(
5373 search_query,
5374 false,
5375 true,
5376 false,
5377 Default::default(),
5378 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5379 false,
5380 None,
5381 )
5382 .unwrap(),
5383 cx
5384 )
5385 .await
5386 .unwrap(),
5387 HashMap::from_iter([
5388 (path!("dir/one.rs").to_string(), vec![8..12]),
5389 (path!("dir/one.ts").to_string(), vec![14..18]),
5390 (path!("dir/two.rs").to_string(), vec![8..12]),
5391 (path!("dir/two.ts").to_string(), vec![14..18]),
5392 ]),
5393 "If no exclusions match, all files should be returned"
5394 );
5395
5396 assert_eq!(
5397 search(
5398 &project,
5399 SearchQuery::text(
5400 search_query,
5401 false,
5402 true,
5403 false,
5404 Default::default(),
5405 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5406 false,
5407 None,
5408 )
5409 .unwrap(),
5410 cx
5411 )
5412 .await
5413 .unwrap(),
5414 HashMap::from_iter([
5415 (path!("dir/one.ts").to_string(), vec![14..18]),
5416 (path!("dir/two.ts").to_string(), vec![14..18]),
5417 ]),
5418 "Rust exclusion search should give only TypeScript files"
5419 );
5420
5421 assert_eq!(
5422 search(
5423 &project,
5424 SearchQuery::text(
5425 search_query,
5426 false,
5427 true,
5428 false,
5429 Default::default(),
5430 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5431 false,
5432 None,
5433 )
5434 .unwrap(),
5435 cx
5436 )
5437 .await
5438 .unwrap(),
5439 HashMap::from_iter([
5440 (path!("dir/one.rs").to_string(), vec![8..12]),
5441 (path!("dir/two.rs").to_string(), vec![8..12]),
5442 ]),
5443 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5444 );
5445
5446 assert!(
5447 search(
5448 &project,
5449 SearchQuery::text(
5450 search_query,
5451 false,
5452 true,
5453 false,
5454 Default::default(),
5455 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5456 .unwrap(),
5457 false,
5458 None,
5459 )
5460 .unwrap(),
5461 cx
5462 )
5463 .await
5464 .unwrap()
5465 .is_empty(),
5466 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5467 );
5468}
5469
5470#[gpui::test]
5471async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5472 init_test(cx);
5473
5474 let search_query = "file";
5475
5476 let fs = FakeFs::new(cx.executor());
5477 fs.insert_tree(
5478 path!("/dir"),
5479 json!({
5480 "one.rs": r#"// Rust file one"#,
5481 "one.ts": r#"// TypeScript file one"#,
5482 "two.rs": r#"// Rust file two"#,
5483 "two.ts": r#"// TypeScript file two"#,
5484 }),
5485 )
5486 .await;
5487 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5488
5489 assert!(
5490 search(
5491 &project,
5492 SearchQuery::text(
5493 search_query,
5494 false,
5495 true,
5496 false,
5497 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5498 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5499 false,
5500 None,
5501 )
5502 .unwrap(),
5503 cx
5504 )
5505 .await
5506 .unwrap()
5507 .is_empty(),
5508 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5509 );
5510
5511 assert!(
5512 search(
5513 &project,
5514 SearchQuery::text(
5515 search_query,
5516 false,
5517 true,
5518 false,
5519 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5520 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5521 false,
5522 None,
5523 )
5524 .unwrap(),
5525 cx
5526 )
5527 .await
5528 .unwrap()
5529 .is_empty(),
5530 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5531 );
5532
5533 assert!(
5534 search(
5535 &project,
5536 SearchQuery::text(
5537 search_query,
5538 false,
5539 true,
5540 false,
5541 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5542 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5543 false,
5544 None,
5545 )
5546 .unwrap(),
5547 cx
5548 )
5549 .await
5550 .unwrap()
5551 .is_empty(),
5552 "Non-matching inclusions and exclusions should not change that."
5553 );
5554
5555 assert_eq!(
5556 search(
5557 &project,
5558 SearchQuery::text(
5559 search_query,
5560 false,
5561 true,
5562 false,
5563 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5564 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5565 false,
5566 None,
5567 )
5568 .unwrap(),
5569 cx
5570 )
5571 .await
5572 .unwrap(),
5573 HashMap::from_iter([
5574 (path!("dir/one.ts").to_string(), vec![14..18]),
5575 (path!("dir/two.ts").to_string(), vec![14..18]),
5576 ]),
5577 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5578 );
5579}
5580
5581#[gpui::test]
5582async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
5583 init_test(cx);
5584
5585 let fs = FakeFs::new(cx.executor());
5586 fs.insert_tree(
5587 path!("/worktree-a"),
5588 json!({
5589 "haystack.rs": r#"// NEEDLE"#,
5590 "haystack.ts": r#"// NEEDLE"#,
5591 }),
5592 )
5593 .await;
5594 fs.insert_tree(
5595 path!("/worktree-b"),
5596 json!({
5597 "haystack.rs": r#"// NEEDLE"#,
5598 "haystack.ts": r#"// NEEDLE"#,
5599 }),
5600 )
5601 .await;
5602
5603 let project = Project::test(
5604 fs.clone(),
5605 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
5606 cx,
5607 )
5608 .await;
5609
5610 assert_eq!(
5611 search(
5612 &project,
5613 SearchQuery::text(
5614 "NEEDLE",
5615 false,
5616 true,
5617 false,
5618 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
5619 Default::default(),
5620 true,
5621 None,
5622 )
5623 .unwrap(),
5624 cx
5625 )
5626 .await
5627 .unwrap(),
5628 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
5629 "should only return results from included worktree"
5630 );
5631 assert_eq!(
5632 search(
5633 &project,
5634 SearchQuery::text(
5635 "NEEDLE",
5636 false,
5637 true,
5638 false,
5639 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
5640 Default::default(),
5641 true,
5642 None,
5643 )
5644 .unwrap(),
5645 cx
5646 )
5647 .await
5648 .unwrap(),
5649 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
5650 "should only return results from included worktree"
5651 );
5652
5653 assert_eq!(
5654 search(
5655 &project,
5656 SearchQuery::text(
5657 "NEEDLE",
5658 false,
5659 true,
5660 false,
5661 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5662 Default::default(),
5663 false,
5664 None,
5665 )
5666 .unwrap(),
5667 cx
5668 )
5669 .await
5670 .unwrap(),
5671 HashMap::from_iter([
5672 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
5673 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
5674 ]),
5675 "should return results from both worktrees"
5676 );
5677}
5678
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Verifies how project-wide search treats gitignored entries:
    //   1. by default, ignored files are skipped entirely;
    //   2. when the query opts into ignored entries, they are searched too;
    //   3. include/exclude path filters still apply to ignored files.
    // Expected match ranges are byte offsets of the query inside each file.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: the gitignored `target/` and `node_modules/` trees are
    // not searched at all.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Same query, but with the ignored-entries flag enabled (4th argument):
    // every file, ignored or not, is expected in the results.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Ignored entries remain subject to include/exclude path filters: only the
    // prettier directory is included, and TS files are excluded from it.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5801
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    // Searches through files containing Cyrillic text. Each Cyrillic letter
    // occupies two bytes in UTF-8, so the expected match ranges below are
    // byte offsets (e.g. the 6-letter "привет" spans 12 bytes).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // A case-sensitive unicode query is represented as a plain text search
    // (asserted below) and only matches the exact lowercase spelling.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A case-insensitive unicode query falls back to a regex-based search
    // (asserted below) and must match both "ПРИВЕТ" and "привет".
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing '.' must be matched literally (not as a regex wildcard)
    // even in the regex fallback, so only "ПРИВЕТ." in two.rs matches
    // (12 bytes of letters + 1 byte for the dot).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
5885
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // Verifies `Project::create_entry` path validation: a name like "b.." is
    // a single legal entry name, while any relative path whose components
    // would escape the worktree (leading or embedded "..") is rejected.
    // Also checks that `open_buffer` rejects ".."-relative paths.
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // Only `/one/two/three` is part of the project; its parents are outside.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." contains dots but is one valid component — creation succeeds.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // The filesystem should contain the original entries plus only "b..":
    // the rejected creations must not have touched the disk.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
5955
5956#[gpui::test]
5957async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
5958 init_test(cx);
5959
5960 let fs = FakeFs::new(cx.executor());
5961 fs.insert_tree(
5962 path!("/dir"),
5963 json!({
5964 "a.tsx": "a",
5965 }),
5966 )
5967 .await;
5968
5969 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5970
5971 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5972 language_registry.add(tsx_lang());
5973 let language_server_names = [
5974 "TypeScriptServer",
5975 "TailwindServer",
5976 "ESLintServer",
5977 "NoHoverCapabilitiesServer",
5978 ];
5979 let mut language_servers = [
5980 language_registry.register_fake_lsp(
5981 "tsx",
5982 FakeLspAdapter {
5983 name: language_server_names[0],
5984 capabilities: lsp::ServerCapabilities {
5985 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5986 ..lsp::ServerCapabilities::default()
5987 },
5988 ..FakeLspAdapter::default()
5989 },
5990 ),
5991 language_registry.register_fake_lsp(
5992 "tsx",
5993 FakeLspAdapter {
5994 name: language_server_names[1],
5995 capabilities: lsp::ServerCapabilities {
5996 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5997 ..lsp::ServerCapabilities::default()
5998 },
5999 ..FakeLspAdapter::default()
6000 },
6001 ),
6002 language_registry.register_fake_lsp(
6003 "tsx",
6004 FakeLspAdapter {
6005 name: language_server_names[2],
6006 capabilities: lsp::ServerCapabilities {
6007 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6008 ..lsp::ServerCapabilities::default()
6009 },
6010 ..FakeLspAdapter::default()
6011 },
6012 ),
6013 language_registry.register_fake_lsp(
6014 "tsx",
6015 FakeLspAdapter {
6016 name: language_server_names[3],
6017 capabilities: lsp::ServerCapabilities {
6018 hover_provider: None,
6019 ..lsp::ServerCapabilities::default()
6020 },
6021 ..FakeLspAdapter::default()
6022 },
6023 ),
6024 ];
6025
6026 let (buffer, _handle) = project
6027 .update(cx, |p, cx| {
6028 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6029 })
6030 .await
6031 .unwrap();
6032 cx.executor().run_until_parked();
6033
6034 let mut servers_with_hover_requests = HashMap::default();
6035 for i in 0..language_server_names.len() {
6036 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
6037 panic!(
6038 "Failed to get language server #{i} with name {}",
6039 &language_server_names[i]
6040 )
6041 });
6042 let new_server_name = new_server.server.name();
6043 assert!(
6044 !servers_with_hover_requests.contains_key(&new_server_name),
6045 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6046 );
6047 match new_server_name.as_ref() {
6048 "TailwindServer" | "TypeScriptServer" => {
6049 servers_with_hover_requests.insert(
6050 new_server_name.clone(),
6051 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6052 move |_, _| {
6053 let name = new_server_name.clone();
6054 async move {
6055 Ok(Some(lsp::Hover {
6056 contents: lsp::HoverContents::Scalar(
6057 lsp::MarkedString::String(format!("{name} hover")),
6058 ),
6059 range: None,
6060 }))
6061 }
6062 },
6063 ),
6064 );
6065 }
6066 "ESLintServer" => {
6067 servers_with_hover_requests.insert(
6068 new_server_name,
6069 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6070 |_, _| async move { Ok(None) },
6071 ),
6072 );
6073 }
6074 "NoHoverCapabilitiesServer" => {
6075 let _never_handled = new_server
6076 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
6077 panic!(
6078 "Should not call for hovers server with no corresponding capabilities"
6079 )
6080 });
6081 }
6082 unexpected => panic!("Unexpected server name: {unexpected}"),
6083 }
6084 }
6085
6086 let hover_task = project.update(cx, |project, cx| {
6087 project.hover(&buffer, Point::new(0, 0), cx)
6088 });
6089 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
6090 |mut hover_request| async move {
6091 hover_request
6092 .next()
6093 .await
6094 .expect("All hover requests should have been triggered")
6095 },
6096 ))
6097 .await;
6098 assert_eq!(
6099 vec!["TailwindServer hover", "TypeScriptServer hover"],
6100 hover_task
6101 .await
6102 .into_iter()
6103 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6104 .sorted()
6105 .collect::<Vec<_>>(),
6106 "Should receive hover responses from all related servers with hover capabilities"
6107 );
6108}
6109
6110#[gpui::test]
6111async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
6112 init_test(cx);
6113
6114 let fs = FakeFs::new(cx.executor());
6115 fs.insert_tree(
6116 path!("/dir"),
6117 json!({
6118 "a.ts": "a",
6119 }),
6120 )
6121 .await;
6122
6123 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6124
6125 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6126 language_registry.add(typescript_lang());
6127 let mut fake_language_servers = language_registry.register_fake_lsp(
6128 "TypeScript",
6129 FakeLspAdapter {
6130 capabilities: lsp::ServerCapabilities {
6131 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6132 ..lsp::ServerCapabilities::default()
6133 },
6134 ..FakeLspAdapter::default()
6135 },
6136 );
6137
6138 let (buffer, _handle) = project
6139 .update(cx, |p, cx| {
6140 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6141 })
6142 .await
6143 .unwrap();
6144 cx.executor().run_until_parked();
6145
6146 let fake_server = fake_language_servers
6147 .next()
6148 .await
6149 .expect("failed to get the language server");
6150
6151 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6152 move |_, _| async move {
6153 Ok(Some(lsp::Hover {
6154 contents: lsp::HoverContents::Array(vec![
6155 lsp::MarkedString::String("".to_string()),
6156 lsp::MarkedString::String(" ".to_string()),
6157 lsp::MarkedString::String("\n\n\n".to_string()),
6158 ]),
6159 range: None,
6160 }))
6161 },
6162 );
6163
6164 let hover_task = project.update(cx, |project, cx| {
6165 project.hover(&buffer, Point::new(0, 0), cx)
6166 });
6167 let () = request_handled
6168 .next()
6169 .await
6170 .expect("All hover requests should have been triggered");
6171 assert_eq!(
6172 Vec::<String>::new(),
6173 hover_task
6174 .await
6175 .into_iter()
6176 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6177 .sorted()
6178 .collect::<Vec<_>>(),
6179 "Empty hover parts should be ignored"
6180 );
6181}
6182
6183#[gpui::test]
6184async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
6185 init_test(cx);
6186
6187 let fs = FakeFs::new(cx.executor());
6188 fs.insert_tree(
6189 path!("/dir"),
6190 json!({
6191 "a.ts": "a",
6192 }),
6193 )
6194 .await;
6195
6196 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6197
6198 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6199 language_registry.add(typescript_lang());
6200 let mut fake_language_servers = language_registry.register_fake_lsp(
6201 "TypeScript",
6202 FakeLspAdapter {
6203 capabilities: lsp::ServerCapabilities {
6204 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6205 ..lsp::ServerCapabilities::default()
6206 },
6207 ..FakeLspAdapter::default()
6208 },
6209 );
6210
6211 let (buffer, _handle) = project
6212 .update(cx, |p, cx| {
6213 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6214 })
6215 .await
6216 .unwrap();
6217 cx.executor().run_until_parked();
6218
6219 let fake_server = fake_language_servers
6220 .next()
6221 .await
6222 .expect("failed to get the language server");
6223
6224 let mut request_handled = fake_server
6225 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
6226 Ok(Some(vec![
6227 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6228 title: "organize imports".to_string(),
6229 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
6230 ..lsp::CodeAction::default()
6231 }),
6232 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6233 title: "fix code".to_string(),
6234 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
6235 ..lsp::CodeAction::default()
6236 }),
6237 ]))
6238 });
6239
6240 let code_actions_task = project.update(cx, |project, cx| {
6241 project.code_actions(
6242 &buffer,
6243 0..buffer.read(cx).len(),
6244 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
6245 cx,
6246 )
6247 });
6248
6249 let () = request_handled
6250 .next()
6251 .await
6252 .expect("The code action request should have been triggered");
6253
6254 let code_actions = code_actions_task.await.unwrap();
6255 assert_eq!(code_actions.len(), 1);
6256 assert_eq!(
6257 code_actions[0].lsp_action.action_kind(),
6258 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
6259 );
6260}
6261
6262#[gpui::test]
6263async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6264 init_test(cx);
6265
6266 let fs = FakeFs::new(cx.executor());
6267 fs.insert_tree(
6268 path!("/dir"),
6269 json!({
6270 "a.tsx": "a",
6271 }),
6272 )
6273 .await;
6274
6275 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6276
6277 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6278 language_registry.add(tsx_lang());
6279 let language_server_names = [
6280 "TypeScriptServer",
6281 "TailwindServer",
6282 "ESLintServer",
6283 "NoActionsCapabilitiesServer",
6284 ];
6285
6286 let mut language_server_rxs = [
6287 language_registry.register_fake_lsp(
6288 "tsx",
6289 FakeLspAdapter {
6290 name: language_server_names[0],
6291 capabilities: lsp::ServerCapabilities {
6292 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6293 ..lsp::ServerCapabilities::default()
6294 },
6295 ..FakeLspAdapter::default()
6296 },
6297 ),
6298 language_registry.register_fake_lsp(
6299 "tsx",
6300 FakeLspAdapter {
6301 name: language_server_names[1],
6302 capabilities: lsp::ServerCapabilities {
6303 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6304 ..lsp::ServerCapabilities::default()
6305 },
6306 ..FakeLspAdapter::default()
6307 },
6308 ),
6309 language_registry.register_fake_lsp(
6310 "tsx",
6311 FakeLspAdapter {
6312 name: language_server_names[2],
6313 capabilities: lsp::ServerCapabilities {
6314 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6315 ..lsp::ServerCapabilities::default()
6316 },
6317 ..FakeLspAdapter::default()
6318 },
6319 ),
6320 language_registry.register_fake_lsp(
6321 "tsx",
6322 FakeLspAdapter {
6323 name: language_server_names[3],
6324 capabilities: lsp::ServerCapabilities {
6325 code_action_provider: None,
6326 ..lsp::ServerCapabilities::default()
6327 },
6328 ..FakeLspAdapter::default()
6329 },
6330 ),
6331 ];
6332
6333 let (buffer, _handle) = project
6334 .update(cx, |p, cx| {
6335 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6336 })
6337 .await
6338 .unwrap();
6339 cx.executor().run_until_parked();
6340
6341 let mut servers_with_actions_requests = HashMap::default();
6342 for i in 0..language_server_names.len() {
6343 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6344 panic!(
6345 "Failed to get language server #{i} with name {}",
6346 &language_server_names[i]
6347 )
6348 });
6349 let new_server_name = new_server.server.name();
6350
6351 assert!(
6352 !servers_with_actions_requests.contains_key(&new_server_name),
6353 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6354 );
6355 match new_server_name.0.as_ref() {
6356 "TailwindServer" | "TypeScriptServer" => {
6357 servers_with_actions_requests.insert(
6358 new_server_name.clone(),
6359 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6360 move |_, _| {
6361 let name = new_server_name.clone();
6362 async move {
6363 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6364 lsp::CodeAction {
6365 title: format!("{name} code action"),
6366 ..lsp::CodeAction::default()
6367 },
6368 )]))
6369 }
6370 },
6371 ),
6372 );
6373 }
6374 "ESLintServer" => {
6375 servers_with_actions_requests.insert(
6376 new_server_name,
6377 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6378 |_, _| async move { Ok(None) },
6379 ),
6380 );
6381 }
6382 "NoActionsCapabilitiesServer" => {
6383 let _never_handled = new_server
6384 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6385 panic!(
6386 "Should not call for code actions server with no corresponding capabilities"
6387 )
6388 });
6389 }
6390 unexpected => panic!("Unexpected server name: {unexpected}"),
6391 }
6392 }
6393
6394 let code_actions_task = project.update(cx, |project, cx| {
6395 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6396 });
6397
6398 // cx.run_until_parked();
6399 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6400 |mut code_actions_request| async move {
6401 code_actions_request
6402 .next()
6403 .await
6404 .expect("All code actions requests should have been triggered")
6405 },
6406 ))
6407 .await;
6408 assert_eq!(
6409 vec!["TailwindServer code action", "TypeScriptServer code action"],
6410 code_actions_task
6411 .await
6412 .unwrap()
6413 .into_iter()
6414 .map(|code_action| code_action.lsp_action.title().to_owned())
6415 .sorted()
6416 .collect::<Vec<_>>(),
6417 "Should receive code actions responses from all related servers with hover capabilities"
6418 );
6419}
6420
6421#[gpui::test]
6422async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6423 init_test(cx);
6424
6425 let fs = FakeFs::new(cx.executor());
6426 fs.insert_tree(
6427 "/dir",
6428 json!({
6429 "a.rs": "let a = 1;",
6430 "b.rs": "let b = 2;",
6431 "c.rs": "let c = 2;",
6432 }),
6433 )
6434 .await;
6435
6436 let project = Project::test(
6437 fs,
6438 [
6439 "/dir/a.rs".as_ref(),
6440 "/dir/b.rs".as_ref(),
6441 "/dir/c.rs".as_ref(),
6442 ],
6443 cx,
6444 )
6445 .await;
6446
6447 // check the initial state and get the worktrees
6448 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6449 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6450 assert_eq!(worktrees.len(), 3);
6451
6452 let worktree_a = worktrees[0].read(cx);
6453 let worktree_b = worktrees[1].read(cx);
6454 let worktree_c = worktrees[2].read(cx);
6455
6456 // check they start in the right order
6457 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6458 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6459 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6460
6461 (
6462 worktrees[0].clone(),
6463 worktrees[1].clone(),
6464 worktrees[2].clone(),
6465 )
6466 });
6467
6468 // move first worktree to after the second
6469 // [a, b, c] -> [b, a, c]
6470 project
6471 .update(cx, |project, cx| {
6472 let first = worktree_a.read(cx);
6473 let second = worktree_b.read(cx);
6474 project.move_worktree(first.id(), second.id(), cx)
6475 })
6476 .expect("moving first after second");
6477
6478 // check the state after moving
6479 project.update(cx, |project, cx| {
6480 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6481 assert_eq!(worktrees.len(), 3);
6482
6483 let first = worktrees[0].read(cx);
6484 let second = worktrees[1].read(cx);
6485 let third = worktrees[2].read(cx);
6486
6487 // check they are now in the right order
6488 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6489 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6490 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6491 });
6492
6493 // move the second worktree to before the first
6494 // [b, a, c] -> [a, b, c]
6495 project
6496 .update(cx, |project, cx| {
6497 let second = worktree_a.read(cx);
6498 let first = worktree_b.read(cx);
6499 project.move_worktree(first.id(), second.id(), cx)
6500 })
6501 .expect("moving second before first");
6502
6503 // check the state after moving
6504 project.update(cx, |project, cx| {
6505 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6506 assert_eq!(worktrees.len(), 3);
6507
6508 let first = worktrees[0].read(cx);
6509 let second = worktrees[1].read(cx);
6510 let third = worktrees[2].read(cx);
6511
6512 // check they are now in the right order
6513 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6514 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6515 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6516 });
6517
6518 // move the second worktree to after the third
6519 // [a, b, c] -> [a, c, b]
6520 project
6521 .update(cx, |project, cx| {
6522 let second = worktree_b.read(cx);
6523 let third = worktree_c.read(cx);
6524 project.move_worktree(second.id(), third.id(), cx)
6525 })
6526 .expect("moving second after third");
6527
6528 // check the state after moving
6529 project.update(cx, |project, cx| {
6530 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6531 assert_eq!(worktrees.len(), 3);
6532
6533 let first = worktrees[0].read(cx);
6534 let second = worktrees[1].read(cx);
6535 let third = worktrees[2].read(cx);
6536
6537 // check they are now in the right order
6538 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6539 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6540 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6541 });
6542
6543 // move the third worktree to before the second
6544 // [a, c, b] -> [a, b, c]
6545 project
6546 .update(cx, |project, cx| {
6547 let third = worktree_c.read(cx);
6548 let second = worktree_b.read(cx);
6549 project.move_worktree(third.id(), second.id(), cx)
6550 })
6551 .expect("moving third before second");
6552
6553 // check the state after moving
6554 project.update(cx, |project, cx| {
6555 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6556 assert_eq!(worktrees.len(), 3);
6557
6558 let first = worktrees[0].read(cx);
6559 let second = worktrees[1].read(cx);
6560 let third = worktrees[2].read(cx);
6561
6562 // check they are now in the right order
6563 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6564 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6565 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6566 });
6567
6568 // move the first worktree to after the third
6569 // [a, b, c] -> [b, c, a]
6570 project
6571 .update(cx, |project, cx| {
6572 let first = worktree_a.read(cx);
6573 let third = worktree_c.read(cx);
6574 project.move_worktree(first.id(), third.id(), cx)
6575 })
6576 .expect("moving first after third");
6577
6578 // check the state after moving
6579 project.update(cx, |project, cx| {
6580 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6581 assert_eq!(worktrees.len(), 3);
6582
6583 let first = worktrees[0].read(cx);
6584 let second = worktrees[1].read(cx);
6585 let third = worktrees[2].read(cx);
6586
6587 // check they are now in the right order
6588 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6589 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6590 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6591 });
6592
6593 // move the third worktree to before the first
6594 // [b, c, a] -> [a, b, c]
6595 project
6596 .update(cx, |project, cx| {
6597 let third = worktree_a.read(cx);
6598 let first = worktree_b.read(cx);
6599 project.move_worktree(third.id(), first.id(), cx)
6600 })
6601 .expect("moving third before first");
6602
6603 // check the state after moving
6604 project.update(cx, |project, cx| {
6605 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6606 assert_eq!(worktrees.len(), 3);
6607
6608 let first = worktrees[0].read(cx);
6609 let second = worktrees[1].read(cx);
6610 let third = worktrees[2].read(cx);
6611
6612 // check they are now in the right order
6613 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6614 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6615 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6616 });
6617}
6618
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an unstaged diff (buffer contents vs. the git index)
    // reports the expected hunks, and that the diff is recomputed after the
    // index contents change.
    init_test(cx);

    // Index (staged) version of the file.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // On-disk version: one added comment line and one modified line.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Expect one added hunk (the comment line) and one modified hunk
    // ("hello" -> "goodbye"). The leading ranges are buffer row ranges.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Rewrite the index: the comment line is now staged, and the `println!`
    // line no longer exists in the index version.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    // After the index change, only the `println!` line differs from the new
    // base text, showing up as a single added hunk.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6716
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies uncommitted diffs (buffer contents vs. HEAD). Based on the
    // assertions below, the secondary status on each hunk tracks whether the
    // same change is still unstaged relative to the index
    // (`HasSecondaryHunk`) or already staged (`NoSecondaryHunk`).
    init_test(cx);

    // HEAD: no comment line, prints "hello world".
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index: the "goodbye" change is staged; the comment line is not.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working copy: comment line plus the "goodbye" change.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index, but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment line is unstaged (secondary hunk present); the
    // "goodbye" modification is already staged.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is not yet staged, so the deleted hunk carries a
    // secondary (unstaged) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the deletion is staged, the secondary hunk disappears.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6896
// Covers the staging state machine for diff hunks:
// - staging a hunk optimistically flips its secondary status to
//   `SecondaryHunkRemovalPending` before the git index write completes;
// - a successful index write settles it at `NoSecondaryHunk`;
// - a failed index write reverts it to `HasSecondaryHunk`;
// - each transition emits `HunksStagedOrUnstaged` / `DiffChanged` events
//   whose changed ranges are asserted below.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index both start with these contents.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: "zero" deleted, "two"/"four" upcased => three hunks.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It too appears as removal-pending at first.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7236
// Staging hunks while filesystem events are paused: optimistic
// `SecondaryHunkRemovalPending` states must survive interleaved/delayed FS
// events and settle at `NoSecondaryHunk` once every buffered event has been
// flushed. The pinned seeds reproduce orderings that previously misbehaved.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD == index at the start; the working copy deletes "zero" and
    // upcases "two" and "four", producing three unstaged hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. It goes to removal-pending and stays there,
    // since the corresponding FS event is withheld.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7430
// Randomized stress test: repeatedly stage/unstage arbitrary hunks (sometimes
// with diff recalculation deprioritized to induce races with index writes),
// then verify that once quiescent, every hunk's secondary status matches the
// last operation requested for it.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of stage/unstage operations; overridable via the OPERATIONS env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.gen_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every fifth line is modified in the buffer => 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    // Randomly toggle hunks, mirroring the expected pending status into our
    // local `hunks` copy, with random yields in between to shuffle task order.
    for _i in 0..operations {
        let hunk_ix = rng.gen_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        for _ in 0..rng.gen_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // After everything settles, each pending status should have resolved to
    // its terminal state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    // The diff's view of each hunk must agree with our local bookkeeping.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7549
7550#[gpui::test]
7551async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7552 init_test(cx);
7553
7554 let committed_contents = r#"
7555 fn main() {
7556 println!("hello from HEAD");
7557 }
7558 "#
7559 .unindent();
7560 let file_contents = r#"
7561 fn main() {
7562 println!("hello from the working copy");
7563 }
7564 "#
7565 .unindent();
7566
7567 let fs = FakeFs::new(cx.background_executor.clone());
7568 fs.insert_tree(
7569 "/dir",
7570 json!({
7571 ".git": {},
7572 "src": {
7573 "main.rs": file_contents,
7574 }
7575 }),
7576 )
7577 .await;
7578
7579 fs.set_head_for_repo(
7580 Path::new("/dir/.git"),
7581 &[("src/main.rs".into(), committed_contents.clone())],
7582 "deadbeef",
7583 );
7584 fs.set_index_for_repo(
7585 Path::new("/dir/.git"),
7586 &[("src/main.rs".into(), committed_contents.clone())],
7587 );
7588
7589 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7590
7591 let buffer = project
7592 .update(cx, |project, cx| {
7593 project.open_local_buffer("/dir/src/main.rs", cx)
7594 })
7595 .await
7596 .unwrap();
7597 let uncommitted_diff = project
7598 .update(cx, |project, cx| {
7599 project.open_uncommitted_diff(buffer.clone(), cx)
7600 })
7601 .await
7602 .unwrap();
7603
7604 cx.run_until_parked();
7605 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7606 let snapshot = buffer.read(cx).snapshot();
7607 assert_hunks(
7608 uncommitted_diff.hunks(&snapshot, cx),
7609 &snapshot,
7610 &uncommitted_diff.base_text_string().unwrap(),
7611 &[(
7612 1..2,
7613 " println!(\"hello from HEAD\");\n",
7614 " println!(\"hello from the working copy\");\n",
7615 DiffHunkStatus {
7616 kind: DiffHunkStatusKind::Modified,
7617 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7618 },
7619 )],
7620 );
7621 });
7622}
7623
// Maps project paths to their owning repository and repo-relative path:
// - paths outside any repository resolve to None;
// - paths inside a nested repository resolve to the innermost repository;
// - removing a repository's .git directory invalidates the mapping.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // Each pair is (worktree-relative path, expected
        // (repository work dir, repo-relative path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::from(repo_path))
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, Path::new(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Deleting dir1's .git directory removes that repository, so paths under
    // dir1 (outside of dep1) should no longer resolve.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, Path::new("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
7713
7714#[gpui::test]
7715async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7716 init_test(cx);
7717 let fs = FakeFs::new(cx.background_executor.clone());
7718 fs.insert_tree(
7719 path!("/root"),
7720 json!({
7721 "home": {
7722 ".git": {},
7723 "project": {
7724 "a.txt": "A"
7725 },
7726 },
7727 }),
7728 )
7729 .await;
7730 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7731
7732 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7733 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7734 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7735
7736 project
7737 .update(cx, |project, cx| project.git_scans_complete(cx))
7738 .await;
7739 tree.flush_fs_events(cx).await;
7740
7741 project.read_with(cx, |project, cx| {
7742 let containing = project
7743 .git_store()
7744 .read(cx)
7745 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7746 assert!(containing.is_none());
7747 });
7748
7749 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7750 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7751 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7752 project
7753 .update(cx, |project, cx| project.git_scans_complete(cx))
7754 .await;
7755 tree.flush_fs_events(cx).await;
7756
7757 project.read_with(cx, |project, cx| {
7758 let containing = project
7759 .git_store()
7760 .read(cx)
7761 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7762 assert_eq!(
7763 containing
7764 .unwrap()
7765 .0
7766 .read(cx)
7767 .work_directory_abs_path
7768 .as_ref(),
7769 Path::new(path!("/root/home"))
7770 );
7771 });
7772}
7773
// End-to-end status tracking against a real on-disk git repository:
// initial scan, worktree edits, commits/index updates, and deletion of
// tracked vs. untracked files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",    // Modified
            "b.txt": "bb",   // Added
            "c.txt": "ccc",  // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a file that was previously unchanged; it should gain a status.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modifications and stage d.txt's removal.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7903
7904#[gpui::test]
7905async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
7906 init_test(cx);
7907 cx.executor().allow_parking();
7908
7909 let root = TempTree::new(json!({
7910 "project": {
7911 "sub": {},
7912 "a.txt": "",
7913 },
7914 }));
7915
7916 let work_dir = root.path().join("project");
7917 let repo = git_init(work_dir.as_path());
7918 // a.txt exists in HEAD and the working copy but is deleted in the index.
7919 git_add("a.txt", &repo);
7920 git_commit("Initial commit", &repo);
7921 git_remove_index("a.txt".as_ref(), &repo);
7922 // `sub` is a nested git repository.
7923 let _sub = git_init(&work_dir.join("sub"));
7924
7925 let project = Project::test(
7926 Arc::new(RealFs::new(None, cx.executor())),
7927 [root.path()],
7928 cx,
7929 )
7930 .await;
7931
7932 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7933 tree.flush_fs_events(cx).await;
7934 project
7935 .update(cx, |project, cx| project.git_scans_complete(cx))
7936 .await;
7937 cx.executor().run_until_parked();
7938
7939 let repository = project.read_with(cx, |project, cx| {
7940 project
7941 .repositories(cx)
7942 .values()
7943 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
7944 .unwrap()
7945 .clone()
7946 });
7947
7948 repository.read_with(cx, |repository, _cx| {
7949 let entries = repository.cached_status().collect::<Vec<_>>();
7950
7951 // `sub` doesn't appear in our computed statuses.
7952 // a.txt appears with a combined `DA` status.
7953 assert_eq!(
7954 entries,
7955 [StatusEntry {
7956 repo_path: "a.txt".into(),
7957 status: TrackedStatus {
7958 index_status: StatusCode::Deleted,
7959 worktree_status: StatusCode::Added
7960 }
7961 .into(),
7962 }]
7963 )
7964 });
7965}
7966
7967#[gpui::test]
7968async fn test_repository_subfolder_git_status(
7969 executor: gpui::BackgroundExecutor,
7970 cx: &mut gpui::TestAppContext,
7971) {
7972 init_test(cx);
7973
7974 let fs = FakeFs::new(executor);
7975 fs.insert_tree(
7976 path!("/root"),
7977 json!({
7978 "my-repo": {
7979 ".git": {},
7980 "a.txt": "a",
7981 "sub-folder-1": {
7982 "sub-folder-2": {
7983 "c.txt": "cc",
7984 "d": {
7985 "e.txt": "eee"
7986 }
7987 },
7988 }
7989 },
7990 }),
7991 )
7992 .await;
7993
7994 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
7995 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
7996
7997 fs.set_status_for_repo(
7998 path!("/root/my-repo/.git").as_ref(),
7999 &[(E_TXT.as_ref(), FileStatus::Untracked)],
8000 );
8001
8002 let project = Project::test(
8003 fs.clone(),
8004 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
8005 cx,
8006 )
8007 .await;
8008
8009 project
8010 .update(cx, |project, cx| project.git_scans_complete(cx))
8011 .await;
8012 cx.run_until_parked();
8013
8014 let repository = project.read_with(cx, |project, cx| {
8015 project.repositories(cx).values().next().unwrap().clone()
8016 });
8017
8018 // Ensure that the git status is loaded correctly
8019 repository.read_with(cx, |repository, _cx| {
8020 assert_eq!(
8021 repository.work_directory_abs_path,
8022 Path::new(path!("/root/my-repo")).into()
8023 );
8024
8025 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
8026 assert_eq!(
8027 repository.status_for_path(&E_TXT.into()).unwrap().status,
8028 FileStatus::Untracked
8029 );
8030 });
8031
8032 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
8033 project
8034 .update(cx, |project, cx| project.git_scans_complete(cx))
8035 .await;
8036 cx.run_until_parked();
8037
8038 repository.read_with(cx, |repository, _cx| {
8039 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
8040 assert_eq!(repository.status_for_path(&E_TXT.into()), None);
8041 });
8042}
8043
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Compiled out via `#[cfg(any())]` (an always-false predicate) until the
// flakiness is resolved. The test walks a real repository through a
// conflicted cherry-pick, checking that `merge_conflicts` reports the
// conflicted path and then clears once the cherry-pick is concluded.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // On a side branch, capitalize a.txt; on main, change it differently.
    // Cherry-picking the side-branch commit onto main then conflicts.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // A conflicted cherry-pick leaves CHERRY_PICK_HEAD behind.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The conflicted path should now be reflected in `merge_conflicts`.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // With the cherry-pick concluded, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8126
// Verifies that rewriting `.gitignore` causes ignored state and git statuses to
// be recomputed: a previously-ignored file becomes visible (and staged via the
// updated index), while a previously-tracked file becomes ignored.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Ignored states should have flipped, and b.txt should now show as Added
    // in the index with no worktree changes.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8194
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
//
// Verifies that renaming a repository's work directory on disk updates
// `work_directory_abs_path` while preserving the tracked file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // "a" is committed then modified; "b" is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: work dir is project1, "a" modified, "b" untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should follow the rename; statuses must be unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8275
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of the Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
//
// End-to-end test of git status tracking against a real repository: initial
// scan, file modification, commits, reset/stash, ignore-rule changes, and
// directory renames must all be reflected in `Repository::status_for_path`.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so they show as untracked.
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        // Committed files have no status entry at all.
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // a.txt was stashed back to its committed content.
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        // b.txt was removed from the index, so it's untracked again.
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules to cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A new file inside a nested directory should be reported as untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming an ancestor directory should carry the status to the new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8477
// Verifies that repositories are only discovered for visible worktrees: adding
// an invisible (non-visible) worktree must not add its containing repository
// to the project's repository set.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // The project only opens the nested `dep1` repository.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Create an invisible worktree for a file inside the outer `dir1` repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list should be unchanged — `dir1`'s repo stays hidden.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8539
// Verifies ignored/status bookkeeping across rescans: files ignored by an
// ancestor `.gitignore` (outside the repo) get no status, files inside an
// ignored dir are marked ignored, and newly created files pick up the right
// state depending on where they land and whether they are in the index.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file-scan exclusions so the `.git` directory itself is listed
    // as an (ignored) worktree entry, which the final assertion relies on.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded; ignored dirs are
    // not scanned eagerly.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create three new files: one tracked-and-staged, one matching the
    // ancestor's gitignore, and one inside the repo-ignored directory.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // With file-scan exclusions cleared, `.git` is listed and ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8675
// Verifies that linked git worktrees (`.git` file pointing at
// `.git/worktrees/...`) and submodules (`.git` file pointing at
// `.git/modules/...`) are each discovered as distinct repositories, and that
// git events in them refresh statuses for buffers they contain.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three work directories should be discovered as repositories.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, not the
    // outer `/project` one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    // Wait for the repository to finish processing pending work.
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
8825
// Verifies that two project worktrees living inside the same git repository
// produce a single deduplicated `Repository` entry.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open two sibling directories of the same repo as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Only one repository (the shared parent) should be registered.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
8872
8873async fn search(
8874 project: &Entity<Project>,
8875 query: SearchQuery,
8876 cx: &mut gpui::TestAppContext,
8877) -> Result<HashMap<String, Vec<Range<usize>>>> {
8878 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8879 let mut results = HashMap::default();
8880 while let Ok(search_result) = search_rx.recv().await {
8881 match search_result {
8882 SearchResult::Buffer { buffer, ranges } => {
8883 results.entry(buffer).or_insert(ranges);
8884 }
8885 SearchResult::LimitReached => {}
8886 }
8887 }
8888 Ok(results
8889 .into_iter()
8890 .map(|(buffer, ranges)| {
8891 buffer.update(cx, |buffer, cx| {
8892 let path = buffer
8893 .file()
8894 .unwrap()
8895 .full_path(cx)
8896 .to_string_lossy()
8897 .to_string();
8898 let ranges = ranges
8899 .into_iter()
8900 .map(|range| range.to_offset(buffer))
8901 .collect::<Vec<_>>();
8902 (path, ranges)
8903 })
8904 })
8905 .collect())
8906}
8907
/// Shared setup for every test in this module: initializes logging and
/// registers the globals (settings, release channel, language support,
/// project settings) that `Project` needs.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        // NOTE(review): the settings store is installed before the other
        // `init` calls, which presumably read settings during registration —
        // keep this ordering.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
8919
8920fn json_lang() -> Arc<Language> {
8921 Arc::new(Language::new(
8922 LanguageConfig {
8923 name: "JSON".into(),
8924 matcher: LanguageMatcher {
8925 path_suffixes: vec!["json".to_string()],
8926 ..Default::default()
8927 },
8928 ..Default::default()
8929 },
8930 None,
8931 ))
8932}
8933
8934fn js_lang() -> Arc<Language> {
8935 Arc::new(Language::new(
8936 LanguageConfig {
8937 name: "JavaScript".into(),
8938 matcher: LanguageMatcher {
8939 path_suffixes: vec!["js".to_string()],
8940 ..Default::default()
8941 },
8942 ..Default::default()
8943 },
8944 None,
8945 ))
8946}
8947
8948fn rust_lang() -> Arc<Language> {
8949 Arc::new(Language::new(
8950 LanguageConfig {
8951 name: "Rust".into(),
8952 matcher: LanguageMatcher {
8953 path_suffixes: vec!["rs".to_string()],
8954 ..Default::default()
8955 },
8956 ..Default::default()
8957 },
8958 Some(tree_sitter_rust::LANGUAGE.into()),
8959 ))
8960}
8961
8962fn typescript_lang() -> Arc<Language> {
8963 Arc::new(Language::new(
8964 LanguageConfig {
8965 name: "TypeScript".into(),
8966 matcher: LanguageMatcher {
8967 path_suffixes: vec!["ts".to_string()],
8968 ..Default::default()
8969 },
8970 ..Default::default()
8971 },
8972 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8973 ))
8974}
8975
8976fn tsx_lang() -> Arc<Language> {
8977 Arc::new(Language::new(
8978 LanguageConfig {
8979 name: "tsx".into(),
8980 matcher: LanguageMatcher {
8981 path_suffixes: vec!["tsx".to_string()],
8982 ..Default::default()
8983 },
8984 ..Default::default()
8985 },
8986 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8987 ))
8988}
8989
/// Resolves every task in the project's task inventory for `task_contexts`,
/// returning previously-used tasks followed by the currently-available ones.
fn get_all_tasks(
    project: &Entity<Project>,
    task_contexts: Arc<TaskContexts>,
    cx: &mut App,
) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
    let new_tasks = project.update(cx, |project, cx| {
        project.task_store.update(cx, |task_store, cx| {
            task_store.task_inventory().unwrap().update(cx, |this, cx| {
                this.used_and_current_resolved_tasks(task_contexts, cx)
            })
        })
    });

    // Concatenate off the main thread: used tasks first, then the rest.
    cx.background_spawn(async move {
        let (mut old, new) = new_tasks.await;
        old.extend(new);
        old
    })
}
9009
9010#[track_caller]
9011fn assert_entry_git_state(
9012 tree: &Worktree,
9013 repository: &Repository,
9014 path: &str,
9015 index_status: Option<StatusCode>,
9016 is_ignored: bool,
9017) {
9018 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9019 let entry = tree
9020 .entry_for_path(path)
9021 .unwrap_or_else(|| panic!("entry {path} not found"));
9022 let status = repository
9023 .status_for_path(&path.into())
9024 .map(|entry| entry.status);
9025 let expected = index_status.map(|index_status| {
9026 TrackedStatus {
9027 index_status,
9028 worktree_status: StatusCode::Unmodified,
9029 }
9030 .into()
9031 });
9032 assert_eq!(
9033 status, expected,
9034 "expected {path} to have git status: {expected:?}"
9035 );
9036 assert_eq!(
9037 entry.is_ignored, is_ignored,
9038 "expected {path} to have is_ignored: {is_ignored}"
9039 );
9040}
9041
9042#[track_caller]
9043fn git_init(path: &Path) -> git2::Repository {
9044 let mut init_opts = RepositoryInitOptions::new();
9045 init_opts.initial_head("main");
9046 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9047}
9048
9049#[track_caller]
9050fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9051 let path = path.as_ref();
9052 let mut index = repo.index().expect("Failed to get index");
9053 index.add_path(path).expect("Failed to add file");
9054 index.write().expect("Failed to write index");
9055}
9056
9057#[track_caller]
9058fn git_remove_index(path: &Path, repo: &git2::Repository) {
9059 let mut index = repo.index().expect("Failed to get index");
9060 index.remove_path(path).expect("Failed to add file");
9061 index.write().expect("Failed to write index");
9062}
9063
9064#[track_caller]
9065fn git_commit(msg: &'static str, repo: &git2::Repository) {
9066 use git2::Signature;
9067
9068 let signature = Signature::now("test", "test@zed.dev").unwrap();
9069 let oid = repo.index().unwrap().write_tree().unwrap();
9070 let tree = repo.find_tree(oid).unwrap();
9071 if let Ok(head) = repo.head() {
9072 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9073
9074 let parent_commit = parent_obj.as_commit().unwrap();
9075
9076 repo.commit(
9077 Some("HEAD"),
9078 &signature,
9079 &signature,
9080 msg,
9081 &tree,
9082 &[parent_commit],
9083 )
9084 .expect("Failed to commit with parent");
9085 } else {
9086 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9087 .expect("Failed to commit");
9088 }
9089}
9090
// Compiled out along with `test_conflicted_cherry_pick`, its only caller.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    // Applies `commit` onto the current HEAD (`git cherry-pick`).
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9096
/// Stashes all working-tree changes (`git stash`). Takes `&mut` because
/// libgit2's stash API requires a mutable repository handle.
#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    repo.stash_save(&signature, "N/A", None)
        .expect("Failed to stash");
}
9105
9106#[track_caller]
9107fn git_reset(offset: usize, repo: &git2::Repository) {
9108 let head = repo.head().expect("Couldn't get repo head");
9109 let object = head.peel(git2::ObjectType::Commit).unwrap();
9110 let commit = object.as_commit().unwrap();
9111 let new_head = commit
9112 .parents()
9113 .inspect(|parnet| {
9114 parnet.message();
9115 })
9116 .nth(offset)
9117 .expect("Not enough history");
9118 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9119 .expect("Could not reset");
9120}
9121
// Compiled out: only used by tests that are themselves `#[cfg(any())]`-disabled.
/// Creates branch `name` pointing at the current HEAD commit (no checkout).
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-pasted panic message: this creates a branch, not a commit.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9132
// Compiled out: only used by tests that are themselves `#[cfg(any())]`-disabled.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    // Point HEAD at `name`, then update the working tree to match it.
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9139
// Compiled out: only used by tests that are themselves `#[cfg(any())]`-disabled.
/// Snapshot of the repository's status: repo-relative path -> `git2::Status`.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}
9149
// Verifies `Project::find_project_path` with absolute paths: existing files in
// either worktree resolve to the right (worktree_id, relative path); paths that
// don't exist but fall inside a worktree still resolve; paths outside every
// worktree resolve to `None`.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root path and id for the assertions.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // A nonexistent file still resolves as long as it's inside a worktree.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}