1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
9 DiffHunkStatusKind, assert_hunks,
10};
11use fs::FakeFs;
12use futures::{StreamExt, future};
13use git::{
14 repository::RepoPath,
15 status::{StatusCode, TrackedStatus},
16};
17use git2::RepositoryInitOptions;
18use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
19use http_client::Url;
20use language::{
21 Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
22 LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
23 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
24 tree_sitter_rust, tree_sitter_typescript,
25};
26use lsp::{
27 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
28 WillRenameFiles, notification::DidRenameFiles,
29};
30use parking_lot::Mutex;
31use paths::{config_dir, tasks_file};
32use postage::stream::Stream as _;
33use pretty_assertions::{assert_eq, assert_matches};
34use rand::{Rng as _, rngs::StdRng};
35use serde_json::json;
36#[cfg(not(windows))]
37use std::os;
38use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
39use task::{ResolvedTask, TaskContext};
40use unindent::Unindent as _;
41use util::{
42 TryFutureExt as _, assert_set_eq, maybe, path,
43 paths::PathMatcher,
44 separator,
45 test::{TempTree, marked_text_offsets},
46 uri,
47};
48use worktree::WorktreeModelHandle as _;
49
50#[gpui::test]
51async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
52 cx.executor().allow_parking();
53
54 let (tx, mut rx) = futures::channel::mpsc::unbounded();
55 let _thread = std::thread::spawn(move || {
56 #[cfg(not(target_os = "windows"))]
57 std::fs::metadata("/tmp").unwrap();
58 #[cfg(target_os = "windows")]
59 std::fs::metadata("C:/Windows").unwrap();
60 std::thread::sleep(Duration::from_millis(1000));
61 tx.unbounded_send(1).unwrap();
62 });
63 rx.next().await.unwrap();
64}
65
66#[gpui::test]
67async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
68 cx.executor().allow_parking();
69
70 let io_task = smol::unblock(move || {
71 println!("sleeping on thread {:?}", std::thread::current().id());
72 std::thread::sleep(Duration::from_millis(10));
73 1
74 });
75
76 let task = cx.foreground_executor().spawn(async move {
77 io_task.await;
78 });
79
80 task.await;
81}
82
83#[cfg(not(windows))]
84#[gpui::test]
85async fn test_symlinks(cx: &mut gpui::TestAppContext) {
86 init_test(cx);
87 cx.executor().allow_parking();
88
89 let dir = TempTree::new(json!({
90 "root": {
91 "apple": "",
92 "banana": {
93 "carrot": {
94 "date": "",
95 "endive": "",
96 }
97 },
98 "fennel": {
99 "grape": "",
100 }
101 }
102 }));
103
104 let root_link_path = dir.path().join("root_link");
105 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
106 os::unix::fs::symlink(
107 dir.path().join("root/fennel"),
108 dir.path().join("root/finnochio"),
109 )
110 .unwrap();
111
112 let project = Project::test(
113 Arc::new(RealFs::new(None, cx.executor())),
114 [root_link_path.as_ref()],
115 cx,
116 )
117 .await;
118
119 project.update(cx, |project, cx| {
120 let tree = project.worktrees(cx).next().unwrap().read(cx);
121 assert_eq!(tree.file_count(), 5);
122 assert_eq!(
123 tree.inode_for_path("fennel/grape"),
124 tree.inode_for_path("finnochio/grape")
125 );
126 });
127}
128
/// Verifies `.editorconfig` support: its settings override `.zed/settings.json`,
/// nested `.editorconfig` files override ancestors, `tab_width` is used when
/// `indent_size` is absent, and globs only affect matching files.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs at the same path so the
    // project watches the fake copy.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
218
/// Exercises `.zed/settings.json` and `.zed/tasks.json` handling across nested
/// worktree directories: per-directory settings cascade, tasks from nested
/// `.zed` dirs are surfaced alongside root tasks, a recently-scheduled task is
/// promoted to the front of the list, and global file-based tasks are appended
/// after worktree tasks.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task context tied to the single worktree; no active-item context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // Source kind for tasks declared in the worktree root's `.zed` dir.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Root `.zed/settings.json` applies to `a/a.rs`; the nested
            // `b/.zed/settings.json` overrides it for `b/b.rs`.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Before anything is scheduled, the nested `b/.zed` task sorts ahead of
    // the root `.zed` task.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as scheduled, then register a global tasks file.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The scheduled root task is now listed first; the global task, including
    // its env, is appended after the worktree tasks.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
423
424#[gpui::test]
425async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
426 init_test(cx);
427 TaskStore::init(None);
428
429 let fs = FakeFs::new(cx.executor());
430 fs.insert_tree(
431 path!("/dir"),
432 json!({
433 ".zed": {
434 "tasks.json": r#"[{
435 "label": "test worktree root",
436 "command": "echo $ZED_WORKTREE_ROOT"
437 }]"#,
438 },
439 "a": {
440 "a.rs": "fn a() {\n A\n}"
441 },
442 }),
443 )
444 .await;
445
446 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
447 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
448
449 cx.executor().run_until_parked();
450 let worktree_id = cx.update(|cx| {
451 project.update(cx, |project, cx| {
452 project.worktrees(cx).next().unwrap().read(cx).id()
453 })
454 });
455
456 let active_non_worktree_item_tasks = cx.update(|cx| {
457 get_all_tasks(
458 &project,
459 &TaskContexts {
460 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
461 active_worktree_context: None,
462 other_worktree_contexts: Vec::new(),
463 lsp_task_sources: HashMap::default(),
464 latest_selection: None,
465 },
466 cx,
467 )
468 });
469 assert!(
470 active_non_worktree_item_tasks.is_empty(),
471 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
472 );
473
474 let active_worktree_tasks = cx.update(|cx| {
475 get_all_tasks(
476 &project,
477 &TaskContexts {
478 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
479 active_worktree_context: Some((worktree_id, {
480 let mut worktree_context = TaskContext::default();
481 worktree_context
482 .task_variables
483 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
484 worktree_context
485 })),
486 other_worktree_contexts: Vec::new(),
487 lsp_task_sources: HashMap::default(),
488 latest_selection: None,
489 },
490 cx,
491 )
492 });
493 assert_eq!(
494 active_worktree_tasks
495 .into_iter()
496 .map(|(source_kind, task)| {
497 let resolved = task.resolved;
498 (source_kind, resolved.command)
499 })
500 .collect::<Vec<_>>(),
501 vec![(
502 TaskSourceKind::Worktree {
503 id: worktree_id,
504 directory_in_worktree: PathBuf::from(separator!(".zed")),
505 id_base: if cfg!(windows) {
506 "local worktree tasks from directory \".zed\"".into()
507 } else {
508 "local worktree tasks from directory \".zed\"".into()
509 },
510 },
511 "echo /dir".to_string(),
512 )]
513 );
514}
515
/// End-to-end test of language-server lifecycle management: servers start
/// lazily once a matching buffer opens, buffers are configured from server
/// capabilities, edits and saves are routed only to matching servers, renames
/// produce close/open pairs (switching servers when the extension changes,
/// which also clears diagnostics and resets the document version), restarts
/// reopen all relevant documents, and closing a buffer notifies only the
/// matching server.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON servers with distinct completion triggers
    // so buffer configuration can be attributed to a specific server.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the new
    // instances start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
917
918#[gpui::test]
919async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
920 init_test(cx);
921
922 let fs = FakeFs::new(cx.executor());
923 fs.insert_tree(
924 path!("/the-root"),
925 json!({
926 ".gitignore": "target\n",
927 "Cargo.lock": "",
928 "src": {
929 "a.rs": "",
930 "b.rs": "",
931 },
932 "target": {
933 "x": {
934 "out": {
935 "x.rs": ""
936 }
937 },
938 "y": {
939 "out": {
940 "y.rs": "",
941 }
942 },
943 "z": {
944 "out": {
945 "z.rs": ""
946 }
947 }
948 }
949 }),
950 )
951 .await;
952 fs.insert_tree(
953 path!("/the-registry"),
954 json!({
955 "dep1": {
956 "src": {
957 "dep1.rs": "",
958 }
959 },
960 "dep2": {
961 "src": {
962 "dep2.rs": "",
963 }
964 },
965 }),
966 )
967 .await;
968 fs.insert_tree(
969 path!("/the/stdlib"),
970 json!({
971 "LICENSE": "",
972 "src": {
973 "string.rs": "",
974 }
975 }),
976 )
977 .await;
978
979 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
980 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
981 (project.languages().clone(), project.lsp_store())
982 });
983 language_registry.add(rust_lang());
984 let mut fake_servers = language_registry.register_fake_lsp(
985 "Rust",
986 FakeLspAdapter {
987 name: "the-language-server",
988 ..Default::default()
989 },
990 );
991
992 cx.executor().run_until_parked();
993
994 // Start the language server by opening a buffer with a compatible file extension.
995 project
996 .update(cx, |project, cx| {
997 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
998 })
999 .await
1000 .unwrap();
1001
1002 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1003 project.update(cx, |project, cx| {
1004 let worktree = project.worktrees(cx).next().unwrap();
1005 assert_eq!(
1006 worktree
1007 .read(cx)
1008 .snapshot()
1009 .entries(true, 0)
1010 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1011 .collect::<Vec<_>>(),
1012 &[
1013 (Path::new(""), false),
1014 (Path::new(".gitignore"), false),
1015 (Path::new("Cargo.lock"), false),
1016 (Path::new("src"), false),
1017 (Path::new("src/a.rs"), false),
1018 (Path::new("src/b.rs"), false),
1019 (Path::new("target"), true),
1020 ]
1021 );
1022 });
1023
1024 let prev_read_dir_count = fs.read_dir_call_count();
1025
1026 let fake_server = fake_servers.next().await.unwrap();
1027 let (server_id, server_name) = lsp_store.read_with(cx, |lsp_store, _| {
1028 let (id, status) = lsp_store.language_server_statuses().next().unwrap();
1029 (id, LanguageServerName::from(status.name.as_str()))
1030 });
1031
1032 // Simulate jumping to a definition in a dependency outside of the worktree.
1033 let _out_of_worktree_buffer = project
1034 .update(cx, |project, cx| {
1035 project.open_local_buffer_via_lsp(
1036 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1037 server_id,
1038 server_name.clone(),
1039 cx,
1040 )
1041 })
1042 .await
1043 .unwrap();
1044
1045 // Keep track of the FS events reported to the language server.
1046 let file_changes = Arc::new(Mutex::new(Vec::new()));
1047 fake_server
1048 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1049 registrations: vec![lsp::Registration {
1050 id: Default::default(),
1051 method: "workspace/didChangeWatchedFiles".to_string(),
1052 register_options: serde_json::to_value(
1053 lsp::DidChangeWatchedFilesRegistrationOptions {
1054 watchers: vec![
1055 lsp::FileSystemWatcher {
1056 glob_pattern: lsp::GlobPattern::String(
1057 path!("/the-root/Cargo.toml").to_string(),
1058 ),
1059 kind: None,
1060 },
1061 lsp::FileSystemWatcher {
1062 glob_pattern: lsp::GlobPattern::String(
1063 path!("/the-root/src/*.{rs,c}").to_string(),
1064 ),
1065 kind: None,
1066 },
1067 lsp::FileSystemWatcher {
1068 glob_pattern: lsp::GlobPattern::String(
1069 path!("/the-root/target/y/**/*.rs").to_string(),
1070 ),
1071 kind: None,
1072 },
1073 lsp::FileSystemWatcher {
1074 glob_pattern: lsp::GlobPattern::String(
1075 path!("/the/stdlib/src/**/*.rs").to_string(),
1076 ),
1077 kind: None,
1078 },
1079 lsp::FileSystemWatcher {
1080 glob_pattern: lsp::GlobPattern::String(
1081 path!("**/Cargo.lock").to_string(),
1082 ),
1083 kind: None,
1084 },
1085 ],
1086 },
1087 )
1088 .ok(),
1089 }],
1090 })
1091 .await
1092 .into_response()
1093 .unwrap();
1094 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1095 let file_changes = file_changes.clone();
1096 move |params, _| {
1097 let mut file_changes = file_changes.lock();
1098 file_changes.extend(params.changes);
1099 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1100 }
1101 });
1102
1103 cx.executor().run_until_parked();
1104 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1105 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1106
1107 let mut new_watched_paths = fs.watched_paths();
1108 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1109 assert_eq!(
1110 &new_watched_paths,
1111 &[
1112 Path::new(path!("/the-root")),
1113 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1114 Path::new(path!("/the/stdlib/src"))
1115 ]
1116 );
1117
1118 // Now the language server has asked us to watch an ignored directory path,
1119 // so we recursively load it.
1120 project.update(cx, |project, cx| {
1121 let worktree = project.visible_worktrees(cx).next().unwrap();
1122 assert_eq!(
1123 worktree
1124 .read(cx)
1125 .snapshot()
1126 .entries(true, 0)
1127 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1128 .collect::<Vec<_>>(),
1129 &[
1130 (Path::new(""), false),
1131 (Path::new(".gitignore"), false),
1132 (Path::new("Cargo.lock"), false),
1133 (Path::new("src"), false),
1134 (Path::new("src/a.rs"), false),
1135 (Path::new("src/b.rs"), false),
1136 (Path::new("target"), true),
1137 (Path::new("target/x"), true),
1138 (Path::new("target/y"), true),
1139 (Path::new("target/y/out"), true),
1140 (Path::new("target/y/out/y.rs"), true),
1141 (Path::new("target/z"), true),
1142 ]
1143 );
1144 });
1145
1146 // Perform some file system mutations, two of which match the watched patterns,
1147 // and one of which does not.
1148 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1149 .await
1150 .unwrap();
1151 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1152 .await
1153 .unwrap();
1154 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1155 .await
1156 .unwrap();
1157 fs.create_file(
1158 path!("/the-root/target/x/out/x2.rs").as_ref(),
1159 Default::default(),
1160 )
1161 .await
1162 .unwrap();
1163 fs.create_file(
1164 path!("/the-root/target/y/out/y2.rs").as_ref(),
1165 Default::default(),
1166 )
1167 .await
1168 .unwrap();
1169 fs.save(
1170 path!("/the-root/Cargo.lock").as_ref(),
1171 &"".into(),
1172 Default::default(),
1173 )
1174 .await
1175 .unwrap();
1176 fs.save(
1177 path!("/the-stdlib/LICENSE").as_ref(),
1178 &"".into(),
1179 Default::default(),
1180 )
1181 .await
1182 .unwrap();
1183 fs.save(
1184 path!("/the/stdlib/src/string.rs").as_ref(),
1185 &"".into(),
1186 Default::default(),
1187 )
1188 .await
1189 .unwrap();
1190
1191 // The language server receives events for the FS mutations that match its watch patterns.
1192 cx.executor().run_until_parked();
1193 assert_eq!(
1194 &*file_changes.lock(),
1195 &[
1196 lsp::FileEvent {
1197 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1198 typ: lsp::FileChangeType::CHANGED,
1199 },
1200 lsp::FileEvent {
1201 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1202 typ: lsp::FileChangeType::DELETED,
1203 },
1204 lsp::FileEvent {
1205 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1206 typ: lsp::FileChangeType::CREATED,
1207 },
1208 lsp::FileEvent {
1209 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1210 typ: lsp::FileChangeType::CREATED,
1211 },
1212 lsp::FileEvent {
1213 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1214 typ: lsp::FileChangeType::CHANGED,
1215 },
1216 ]
1217 );
1218}
1219
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    // A project opened on two individual files (two single-file worktrees):
    // diagnostics published for each file's URI must land only in the
    // matching buffer, with the expected severities.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open the project directly on the two files rather than their directory.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Publish, from the same server id, an ERROR for a.rs and a WARNING for b.rs.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer should render exactly one highlighted chunk — the variable
    // name covered by columns 4..5 — with its own severity, and nothing from
    // the other file's diagnostics.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1321
1322#[gpui::test]
1323async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1324 init_test(cx);
1325
1326 let fs = FakeFs::new(cx.executor());
1327 fs.insert_tree(
1328 path!("/root"),
1329 json!({
1330 "dir": {
1331 ".git": {
1332 "HEAD": "ref: refs/heads/main",
1333 },
1334 ".gitignore": "b.rs",
1335 "a.rs": "let a = 1;",
1336 "b.rs": "let b = 2;",
1337 },
1338 "other.rs": "let b = c;"
1339 }),
1340 )
1341 .await;
1342
1343 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1344 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1345 let (worktree, _) = project
1346 .update(cx, |project, cx| {
1347 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1348 })
1349 .await
1350 .unwrap();
1351 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1352
1353 let (worktree, _) = project
1354 .update(cx, |project, cx| {
1355 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1356 })
1357 .await
1358 .unwrap();
1359 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1360
1361 let server_id = LanguageServerId(0);
1362 lsp_store.update(cx, |lsp_store, cx| {
1363 lsp_store
1364 .update_diagnostics(
1365 server_id,
1366 lsp::PublishDiagnosticsParams {
1367 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1368 version: None,
1369 diagnostics: vec![lsp::Diagnostic {
1370 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1371 severity: Some(lsp::DiagnosticSeverity::ERROR),
1372 message: "unused variable 'b'".to_string(),
1373 ..Default::default()
1374 }],
1375 },
1376 &[],
1377 cx,
1378 )
1379 .unwrap();
1380 lsp_store
1381 .update_diagnostics(
1382 server_id,
1383 lsp::PublishDiagnosticsParams {
1384 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1385 version: None,
1386 diagnostics: vec![lsp::Diagnostic {
1387 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1388 severity: Some(lsp::DiagnosticSeverity::ERROR),
1389 message: "unknown variable 'c'".to_string(),
1390 ..Default::default()
1391 }],
1392 },
1393 &[],
1394 cx,
1395 )
1396 .unwrap();
1397 });
1398
1399 let main_ignored_buffer = project
1400 .update(cx, |project, cx| {
1401 project.open_buffer((main_worktree_id, "b.rs"), cx)
1402 })
1403 .await
1404 .unwrap();
1405 main_ignored_buffer.update(cx, |buffer, _| {
1406 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1407 assert_eq!(
1408 chunks
1409 .iter()
1410 .map(|(s, d)| (s.as_str(), *d))
1411 .collect::<Vec<_>>(),
1412 &[
1413 ("let ", None),
1414 ("b", Some(DiagnosticSeverity::ERROR)),
1415 (" = 2;", None),
1416 ],
1417 "Gigitnored buffers should still get in-buffer diagnostics",
1418 );
1419 });
1420 let other_buffer = project
1421 .update(cx, |project, cx| {
1422 project.open_buffer((other_worktree_id, ""), cx)
1423 })
1424 .await
1425 .unwrap();
1426 other_buffer.update(cx, |buffer, _| {
1427 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1428 assert_eq!(
1429 chunks
1430 .iter()
1431 .map(|(s, d)| (s.as_str(), *d))
1432 .collect::<Vec<_>>(),
1433 &[
1434 ("let b = ", None),
1435 ("c", Some(DiagnosticSeverity::ERROR)),
1436 (";", None),
1437 ],
1438 "Buffers from hidden projects should still get in-buffer diagnostics"
1439 );
1440 });
1441
1442 project.update(cx, |project, cx| {
1443 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1444 assert_eq!(
1445 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1446 vec![(
1447 ProjectPath {
1448 worktree_id: main_worktree_id,
1449 path: Arc::from(Path::new("b.rs")),
1450 },
1451 server_id,
1452 DiagnosticSummary {
1453 error_count: 1,
1454 warning_count: 0,
1455 }
1456 )]
1457 );
1458 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1459 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1460 });
1461}
1462
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Lifecycle of disk-based diagnostics: progress on the configured token
    // must bracket the run with DiskBasedDiagnosticsStarted/Finished events,
    // published diagnostics must surface in the buffer and emit a
    // DiagnosticsUpdated event, and re-publishing identical (empty)
    // diagnostics must NOT produce a second update event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Progress notifications on this token are what mark the server
            // as "running disk-based diagnostics" in the assertions below.
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the disk-based token starts the diagnostics run.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic mid-run emits DiagnosticsUpdated for its path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress on the token closes the diagnostics run.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // The published diagnostic is visible in the (newly opened) buffer.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical (empty) publish must be a no-op: no further events.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1598
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restarting a language server while its disk-based diagnostics run is
    // still in progress must not leave the project stuck "running
    // diagnostics": once the replacement server's run completes, no server
    // is reported as running, even though the old server never ended its
    // progress.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Note: the replacement server gets the next id, LanguageServerId(1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1685
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    // Diagnostics published by a language server must be cleared — from both
    // the buffer and the project summary — when that server is restarted.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is visible in the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1766
1767#[gpui::test]
1768async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1769 init_test(cx);
1770
1771 let fs = FakeFs::new(cx.executor());
1772 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1773
1774 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1775 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1776
1777 language_registry.add(rust_lang());
1778 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1779
1780 let (buffer, _handle) = project
1781 .update(cx, |project, cx| {
1782 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1783 })
1784 .await
1785 .unwrap();
1786
1787 // Before restarting the server, report diagnostics with an unknown buffer version.
1788 let fake_server = fake_servers.next().await.unwrap();
1789 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1790 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1791 version: Some(10000),
1792 diagnostics: Vec::new(),
1793 });
1794 cx.executor().run_until_parked();
1795 project.update(cx, |project, cx| {
1796 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1797 });
1798
1799 let mut fake_server = fake_servers.next().await.unwrap();
1800 let notification = fake_server
1801 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1802 .await
1803 .text_document;
1804 assert_eq!(notification.version, 0);
1805}
1806
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Cancelling language-server work for a buffer must send a
    // WorkDoneProgressCancel notification only for progress that was begun
    // with `cancellable: Some(true)`, skipping non-cancellable work.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First progress: explicitly non-cancellable — must NOT receive a cancel.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // Second progress: cancellable — this is the one we expect cancelled.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1871
1872#[gpui::test]
1873async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1874 init_test(cx);
1875
1876 let fs = FakeFs::new(cx.executor());
1877 fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
1878 .await;
1879
1880 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1881 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1882
1883 let mut fake_rust_servers = language_registry.register_fake_lsp(
1884 "Rust",
1885 FakeLspAdapter {
1886 name: "rust-lsp",
1887 ..Default::default()
1888 },
1889 );
1890 let mut fake_js_servers = language_registry.register_fake_lsp(
1891 "JavaScript",
1892 FakeLspAdapter {
1893 name: "js-lsp",
1894 ..Default::default()
1895 },
1896 );
1897 language_registry.add(rust_lang());
1898 language_registry.add(js_lang());
1899
1900 let _rs_buffer = project
1901 .update(cx, |project, cx| {
1902 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1903 })
1904 .await
1905 .unwrap();
1906 let _js_buffer = project
1907 .update(cx, |project, cx| {
1908 project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
1909 })
1910 .await
1911 .unwrap();
1912
1913 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1914 assert_eq!(
1915 fake_rust_server_1
1916 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1917 .await
1918 .text_document
1919 .uri
1920 .as_str(),
1921 uri!("file:///dir/a.rs")
1922 );
1923
1924 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1925 assert_eq!(
1926 fake_js_server
1927 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1928 .await
1929 .text_document
1930 .uri
1931 .as_str(),
1932 uri!("file:///dir/b.js")
1933 );
1934
1935 // Disable Rust language server, ensuring only that server gets stopped.
1936 cx.update(|cx| {
1937 SettingsStore::update_global(cx, |settings, cx| {
1938 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1939 settings.languages.insert(
1940 "Rust".into(),
1941 LanguageSettingsContent {
1942 enable_language_server: Some(false),
1943 ..Default::default()
1944 },
1945 );
1946 });
1947 })
1948 });
1949 fake_rust_server_1
1950 .receive_notification::<lsp::notification::Exit>()
1951 .await;
1952
1953 // Enable Rust and disable JavaScript language servers, ensuring that the
1954 // former gets started again and that the latter stops.
1955 cx.update(|cx| {
1956 SettingsStore::update_global(cx, |settings, cx| {
1957 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1958 settings.languages.insert(
1959 LanguageName::new("Rust"),
1960 LanguageSettingsContent {
1961 enable_language_server: Some(true),
1962 ..Default::default()
1963 },
1964 );
1965 settings.languages.insert(
1966 LanguageName::new("JavaScript"),
1967 LanguageSettingsContent {
1968 enable_language_server: Some(false),
1969 ..Default::default()
1970 },
1971 );
1972 });
1973 })
1974 });
1975 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1976 assert_eq!(
1977 fake_rust_server_2
1978 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1979 .await
1980 .text_document
1981 .uri
1982 .as_str(),
1983 uri!("file:///dir/a.rs")
1984 );
1985 fake_js_server
1986 .receive_notification::<lsp::notification::Exit>()
1987 .await;
1988}
1989
1990#[gpui::test(iterations = 3)]
1991async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1992 init_test(cx);
1993
1994 let text = "
1995 fn a() { A }
1996 fn b() { BB }
1997 fn c() { CCC }
1998 "
1999 .unindent();
2000
2001 let fs = FakeFs::new(cx.executor());
2002 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2003
2004 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2005 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2006
2007 language_registry.add(rust_lang());
2008 let mut fake_servers = language_registry.register_fake_lsp(
2009 "Rust",
2010 FakeLspAdapter {
2011 disk_based_diagnostics_sources: vec!["disk".into()],
2012 ..Default::default()
2013 },
2014 );
2015
2016 let buffer = project
2017 .update(cx, |project, cx| {
2018 project.open_local_buffer(path!("/dir/a.rs"), cx)
2019 })
2020 .await
2021 .unwrap();
2022
2023 let _handle = project.update(cx, |project, cx| {
2024 project.register_buffer_with_language_servers(&buffer, cx)
2025 });
2026
2027 let mut fake_server = fake_servers.next().await.unwrap();
2028 let open_notification = fake_server
2029 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2030 .await;
2031
2032 // Edit the buffer, moving the content down
2033 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2034 let change_notification_1 = fake_server
2035 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2036 .await;
2037 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2038
2039 // Report some diagnostics for the initial version of the buffer
2040 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2041 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2042 version: Some(open_notification.text_document.version),
2043 diagnostics: vec![
2044 lsp::Diagnostic {
2045 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2046 severity: Some(DiagnosticSeverity::ERROR),
2047 message: "undefined variable 'A'".to_string(),
2048 source: Some("disk".to_string()),
2049 ..Default::default()
2050 },
2051 lsp::Diagnostic {
2052 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2053 severity: Some(DiagnosticSeverity::ERROR),
2054 message: "undefined variable 'BB'".to_string(),
2055 source: Some("disk".to_string()),
2056 ..Default::default()
2057 },
2058 lsp::Diagnostic {
2059 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2060 severity: Some(DiagnosticSeverity::ERROR),
2061 source: Some("disk".to_string()),
2062 message: "undefined variable 'CCC'".to_string(),
2063 ..Default::default()
2064 },
2065 ],
2066 });
2067
2068 // The diagnostics have moved down since they were created.
2069 cx.executor().run_until_parked();
2070 buffer.update(cx, |buffer, _| {
2071 assert_eq!(
2072 buffer
2073 .snapshot()
2074 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2075 .collect::<Vec<_>>(),
2076 &[
2077 DiagnosticEntry {
2078 range: Point::new(3, 9)..Point::new(3, 11),
2079 diagnostic: Diagnostic {
2080 source: Some("disk".into()),
2081 severity: DiagnosticSeverity::ERROR,
2082 message: "undefined variable 'BB'".to_string(),
2083 is_disk_based: true,
2084 group_id: 1,
2085 is_primary: true,
2086 ..Default::default()
2087 },
2088 },
2089 DiagnosticEntry {
2090 range: Point::new(4, 9)..Point::new(4, 12),
2091 diagnostic: Diagnostic {
2092 source: Some("disk".into()),
2093 severity: DiagnosticSeverity::ERROR,
2094 message: "undefined variable 'CCC'".to_string(),
2095 is_disk_based: true,
2096 group_id: 2,
2097 is_primary: true,
2098 ..Default::default()
2099 }
2100 }
2101 ]
2102 );
2103 assert_eq!(
2104 chunks_with_diagnostics(buffer, 0..buffer.len()),
2105 [
2106 ("\n\nfn a() { ".to_string(), None),
2107 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2108 (" }\nfn b() { ".to_string(), None),
2109 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2110 (" }\nfn c() { ".to_string(), None),
2111 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2112 (" }\n".to_string(), None),
2113 ]
2114 );
2115 assert_eq!(
2116 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2117 [
2118 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2119 (" }\nfn c() { ".to_string(), None),
2120 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2121 ]
2122 );
2123 });
2124
2125 // Ensure overlapping diagnostics are highlighted correctly.
2126 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2127 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2128 version: Some(open_notification.text_document.version),
2129 diagnostics: vec![
2130 lsp::Diagnostic {
2131 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2132 severity: Some(DiagnosticSeverity::ERROR),
2133 message: "undefined variable 'A'".to_string(),
2134 source: Some("disk".to_string()),
2135 ..Default::default()
2136 },
2137 lsp::Diagnostic {
2138 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2139 severity: Some(DiagnosticSeverity::WARNING),
2140 message: "unreachable statement".to_string(),
2141 source: Some("disk".to_string()),
2142 ..Default::default()
2143 },
2144 ],
2145 });
2146
2147 cx.executor().run_until_parked();
2148 buffer.update(cx, |buffer, _| {
2149 assert_eq!(
2150 buffer
2151 .snapshot()
2152 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2153 .collect::<Vec<_>>(),
2154 &[
2155 DiagnosticEntry {
2156 range: Point::new(2, 9)..Point::new(2, 12),
2157 diagnostic: Diagnostic {
2158 source: Some("disk".into()),
2159 severity: DiagnosticSeverity::WARNING,
2160 message: "unreachable statement".to_string(),
2161 is_disk_based: true,
2162 group_id: 4,
2163 is_primary: true,
2164 ..Default::default()
2165 }
2166 },
2167 DiagnosticEntry {
2168 range: Point::new(2, 9)..Point::new(2, 10),
2169 diagnostic: Diagnostic {
2170 source: Some("disk".into()),
2171 severity: DiagnosticSeverity::ERROR,
2172 message: "undefined variable 'A'".to_string(),
2173 is_disk_based: true,
2174 group_id: 3,
2175 is_primary: true,
2176 ..Default::default()
2177 },
2178 }
2179 ]
2180 );
2181 assert_eq!(
2182 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2183 [
2184 ("fn a() { ".to_string(), None),
2185 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2186 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2187 ("\n".to_string(), None),
2188 ]
2189 );
2190 assert_eq!(
2191 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2192 [
2193 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2194 ("\n".to_string(), None),
2195 ]
2196 );
2197 });
2198
2199 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2200 // changes since the last save.
2201 buffer.update(cx, |buffer, cx| {
2202 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2203 buffer.edit(
2204 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2205 None,
2206 cx,
2207 );
2208 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2209 });
2210 let change_notification_2 = fake_server
2211 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2212 .await;
2213 assert!(
2214 change_notification_2.text_document.version > change_notification_1.text_document.version
2215 );
2216
2217 // Handle out-of-order diagnostics
2218 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2219 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2220 version: Some(change_notification_2.text_document.version),
2221 diagnostics: vec![
2222 lsp::Diagnostic {
2223 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2224 severity: Some(DiagnosticSeverity::ERROR),
2225 message: "undefined variable 'BB'".to_string(),
2226 source: Some("disk".to_string()),
2227 ..Default::default()
2228 },
2229 lsp::Diagnostic {
2230 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2231 severity: Some(DiagnosticSeverity::WARNING),
2232 message: "undefined variable 'A'".to_string(),
2233 source: Some("disk".to_string()),
2234 ..Default::default()
2235 },
2236 ],
2237 });
2238
2239 cx.executor().run_until_parked();
2240 buffer.update(cx, |buffer, _| {
2241 assert_eq!(
2242 buffer
2243 .snapshot()
2244 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2245 .collect::<Vec<_>>(),
2246 &[
2247 DiagnosticEntry {
2248 range: Point::new(2, 21)..Point::new(2, 22),
2249 diagnostic: Diagnostic {
2250 source: Some("disk".into()),
2251 severity: DiagnosticSeverity::WARNING,
2252 message: "undefined variable 'A'".to_string(),
2253 is_disk_based: true,
2254 group_id: 6,
2255 is_primary: true,
2256 ..Default::default()
2257 }
2258 },
2259 DiagnosticEntry {
2260 range: Point::new(3, 9)..Point::new(3, 14),
2261 diagnostic: Diagnostic {
2262 source: Some("disk".into()),
2263 severity: DiagnosticSeverity::ERROR,
2264 message: "undefined variable 'BB'".to_string(),
2265 is_disk_based: true,
2266 group_id: 5,
2267 is_primary: true,
2268 ..Default::default()
2269 },
2270 }
2271 ]
2272 );
2273 });
2274}
2275
// Regression test: diagnostics with zero-width (empty) ranges must still be
// visible when the buffer is chunked with diagnostic highlights.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two diagnostics with empty ranges directly into the LSP store:
    // one in the middle of line 0 (a ';' follows it) and one at the very end
    // of line 1 (nothing follows it on that line).
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2348
2349#[gpui::test]
2350async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2351 init_test(cx);
2352
2353 let fs = FakeFs::new(cx.executor());
2354 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2355 .await;
2356
2357 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2358 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2359
2360 lsp_store.update(cx, |lsp_store, cx| {
2361 lsp_store
2362 .update_diagnostic_entries(
2363 LanguageServerId(0),
2364 Path::new("/dir/a.rs").to_owned(),
2365 None,
2366 vec![DiagnosticEntry {
2367 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2368 diagnostic: Diagnostic {
2369 severity: DiagnosticSeverity::ERROR,
2370 is_primary: true,
2371 message: "syntax error a1".to_string(),
2372 ..Default::default()
2373 },
2374 }],
2375 cx,
2376 )
2377 .unwrap();
2378 lsp_store
2379 .update_diagnostic_entries(
2380 LanguageServerId(1),
2381 Path::new("/dir/a.rs").to_owned(),
2382 None,
2383 vec![DiagnosticEntry {
2384 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2385 diagnostic: Diagnostic {
2386 severity: DiagnosticSeverity::ERROR,
2387 is_primary: true,
2388 message: "syntax error b1".to_string(),
2389 ..Default::default()
2390 },
2391 }],
2392 cx,
2393 )
2394 .unwrap();
2395
2396 assert_eq!(
2397 lsp_store.diagnostic_summary(false, cx),
2398 DiagnosticSummary {
2399 error_count: 2,
2400 warning_count: 0,
2401 }
2402 );
2403 });
2404}
2405
// Verifies that edits a language server computed against an out-of-date
// document version are transformed through the buffer edits made since that
// version, so they still land in the locations the server intended.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the edits
    // below are sent against this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Interpret the server's edits against `lsp_document_version`; positions
    // must be mapped through the buffer edits above before being applied.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits must land each change where the server
    // intended, despite the intervening comment insertions.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2560
// Verifies that a huge "rewrite the whole file" style diff from a language
// server is minimized down to just the edits that actually change the buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four raw LSP edits above collapse down to two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2671
2672#[gpui::test]
2673async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
2674 cx: &mut gpui::TestAppContext,
2675) {
2676 init_test(cx);
2677
2678 let text = "Path()";
2679
2680 let fs = FakeFs::new(cx.executor());
2681 fs.insert_tree(
2682 path!("/dir"),
2683 json!({
2684 "a.rs": text
2685 }),
2686 )
2687 .await;
2688
2689 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2690 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2691 let buffer = project
2692 .update(cx, |project, cx| {
2693 project.open_local_buffer(path!("/dir/a.rs"), cx)
2694 })
2695 .await
2696 .unwrap();
2697
2698 // Simulate the language server sending us a pair of edits at the same location,
2699 // with an insertion following a replacement (which violates the LSP spec).
2700 let edits = lsp_store
2701 .update(cx, |lsp_store, cx| {
2702 lsp_store.as_local_mut().unwrap().edits_from_lsp(
2703 &buffer,
2704 [
2705 lsp::TextEdit {
2706 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
2707 new_text: "Path".into(),
2708 },
2709 lsp::TextEdit {
2710 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2711 new_text: "from path import Path\n\n\n".into(),
2712 },
2713 ],
2714 LanguageServerId(0),
2715 None,
2716 cx,
2717 )
2718 })
2719 .await
2720 .unwrap();
2721
2722 buffer.update(cx, |buffer, cx| {
2723 buffer.edit(edits, None, cx);
2724 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
2725 });
2726}
2727
// Verifies that malformed LSP edits — ranges that are inverted (start after
// end) or that point past the end of the file — are normalized rather than
// rejected, and still minimize down to the expected buffer edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Out-of-bounds range: line 99 does not exist in the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // After normalization, the whole batch reduces to two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2834
2835fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2836 buffer: &Buffer,
2837 range: Range<T>,
2838) -> Vec<(String, Option<DiagnosticSeverity>)> {
2839 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2840 for chunk in buffer.snapshot().chunks(range, true) {
2841 if chunks.last().map_or(false, |prev_chunk| {
2842 prev_chunk.1 == chunk.diagnostic_severity
2843 }) {
2844 chunks.last_mut().unwrap().0.push_str(chunk.text);
2845 } else {
2846 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2847 }
2848 }
2849 chunks
2850}
2851
// End-to-end go-to-definition: the target file (`a.rs`) is not part of the
// project's visible worktrees, so it gets opened in a non-visible worktree
// that is released once the last handle to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is opened as the project; `a.rs` exists on disk but is not
    // in any worktree yet.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server responds to goto-definition with a location in `a.rs`.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, `a.rs` appears as a worktree with
        // `is_visible() == false`.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the non-visible worktree for `a.rs`.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2949
// Completion resolution: when a completion item carries a `text_edit`, its
// range and new text take precedence over both `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item supplies all three sources of completion text; only the
    // text_edit's content and range should be used.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion uses the text_edit's text and its 3-character
    // range (the "fqn" suffix).
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3032
// Completion resolution when items omit `text_edit` but the completion list
// supplies a default `edit_range` (LSP `itemDefaults`): the default range is
// combined with `insert_text` when present, or with `label` as the final
// fallback.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text is paired with the default edit_range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no text_edit and no insert_text, the label is the final fallback.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3168
// Completion resolution when the server provides neither `text_edit` nor a
// default `edit_range`: the replace range is derived on the client side
// (presumably from the word adjacent to the completion position — the asserts
// below pin the observed behavior).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // insert_text is used, and the range covers the trailing "fqn".
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is used, and the range covers "cmp" (ending just before the
    // closing quote, since the completion point was inside the string).
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3274
3275#[gpui::test]
3276async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
3277 init_test(cx);
3278
3279 let fs = FakeFs::new(cx.executor());
3280 fs.insert_tree(
3281 path!("/dir"),
3282 json!({
3283 "a.ts": "",
3284 }),
3285 )
3286 .await;
3287
3288 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3289
3290 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3291 language_registry.add(typescript_lang());
3292 let mut fake_language_servers = language_registry.register_fake_lsp(
3293 "TypeScript",
3294 FakeLspAdapter {
3295 capabilities: lsp::ServerCapabilities {
3296 completion_provider: Some(lsp::CompletionOptions {
3297 trigger_characters: Some(vec![":".to_string()]),
3298 ..Default::default()
3299 }),
3300 ..Default::default()
3301 },
3302 ..Default::default()
3303 },
3304 );
3305
3306 let (buffer, _handle) = project
3307 .update(cx, |p, cx| {
3308 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3309 })
3310 .await
3311 .unwrap();
3312
3313 let fake_server = fake_language_servers.next().await.unwrap();
3314
3315 let text = "let a = b.fqn";
3316 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3317 let completions = project.update(cx, |project, cx| {
3318 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3319 });
3320
3321 fake_server
3322 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3323 Ok(Some(lsp::CompletionResponse::Array(vec![
3324 lsp::CompletionItem {
3325 label: "fullyQualifiedName?".into(),
3326 insert_text: Some("fully\rQualified\r\nName".into()),
3327 ..Default::default()
3328 },
3329 ])))
3330 })
3331 .next()
3332 .await;
3333 let completions = completions
3334 .await
3335 .unwrap()
3336 .into_iter()
3337 .flat_map(|response| response.completions)
3338 .collect::<Vec<_>>();
3339 assert_eq!(completions.len(), 1);
3340 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
3341}
3342
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The fake server advertises lazily-resolved code actions plus a single
    // executable command, mirroring servers that return command-only actions
    // (no edits) and apply their changes via `workspace/executeCommand`.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    // Note the request is kicked off *before* the handler is installed; the
    // handler's `.next().await` then services that in-flight request.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first (command-backed) action.
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3484
3485#[gpui::test(iterations = 10)]
3486async fn test_save_file(cx: &mut gpui::TestAppContext) {
3487 init_test(cx);
3488
3489 let fs = FakeFs::new(cx.executor());
3490 fs.insert_tree(
3491 path!("/dir"),
3492 json!({
3493 "file1": "the old contents",
3494 }),
3495 )
3496 .await;
3497
3498 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3499 let buffer = project
3500 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3501 .await
3502 .unwrap();
3503 buffer.update(cx, |buffer, cx| {
3504 assert_eq!(buffer.text(), "the old contents");
3505 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3506 });
3507
3508 project
3509 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3510 .await
3511 .unwrap();
3512
3513 let new_text = fs
3514 .load(Path::new(path!("/dir/file1")))
3515 .await
3516 .unwrap()
3517 .replace("\r\n", "\n");
3518 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3519}
3520
3521#[gpui::test(iterations = 30)]
3522async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3523 init_test(cx);
3524
3525 let fs = FakeFs::new(cx.executor().clone());
3526 fs.insert_tree(
3527 path!("/dir"),
3528 json!({
3529 "file1": "the original contents",
3530 }),
3531 )
3532 .await;
3533
3534 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3535 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3536 let buffer = project
3537 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3538 .await
3539 .unwrap();
3540
3541 // Simulate buffer diffs being slow, so that they don't complete before
3542 // the next file change occurs.
3543 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3544
3545 // Change the buffer's file on disk, and then wait for the file change
3546 // to be detected by the worktree, so that the buffer starts reloading.
3547 fs.save(
3548 path!("/dir/file1").as_ref(),
3549 &"the first contents".into(),
3550 Default::default(),
3551 )
3552 .await
3553 .unwrap();
3554 worktree.next_event(cx).await;
3555
3556 // Change the buffer's file again. Depending on the random seed, the
3557 // previous file change may still be in progress.
3558 fs.save(
3559 path!("/dir/file1").as_ref(),
3560 &"the second contents".into(),
3561 Default::default(),
3562 )
3563 .await
3564 .unwrap();
3565 worktree.next_event(cx).await;
3566
3567 cx.executor().run_until_parked();
3568 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3569 buffer.read_with(cx, |buffer, _| {
3570 assert_eq!(buffer.text(), on_disk_text);
3571 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3572 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3573 });
3574}
3575
3576#[gpui::test(iterations = 30)]
3577async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3578 init_test(cx);
3579
3580 let fs = FakeFs::new(cx.executor().clone());
3581 fs.insert_tree(
3582 path!("/dir"),
3583 json!({
3584 "file1": "the original contents",
3585 }),
3586 )
3587 .await;
3588
3589 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3590 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3591 let buffer = project
3592 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3593 .await
3594 .unwrap();
3595
3596 // Simulate buffer diffs being slow, so that they don't complete before
3597 // the next file change occurs.
3598 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3599
3600 // Change the buffer's file on disk, and then wait for the file change
3601 // to be detected by the worktree, so that the buffer starts reloading.
3602 fs.save(
3603 path!("/dir/file1").as_ref(),
3604 &"the first contents".into(),
3605 Default::default(),
3606 )
3607 .await
3608 .unwrap();
3609 worktree.next_event(cx).await;
3610
3611 cx.executor()
3612 .spawn(cx.executor().simulate_random_delay())
3613 .await;
3614
3615 // Perform a noop edit, causing the buffer's version to increase.
3616 buffer.update(cx, |buffer, cx| {
3617 buffer.edit([(0..0, " ")], None, cx);
3618 buffer.undo(cx);
3619 });
3620
3621 cx.executor().run_until_parked();
3622 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3623 buffer.read_with(cx, |buffer, _| {
3624 let buffer_text = buffer.text();
3625 if buffer_text == on_disk_text {
3626 assert!(
3627 !buffer.is_dirty() && !buffer.has_conflict(),
3628 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3629 );
3630 }
3631 // If the file change occurred while the buffer was processing the first
3632 // change, the buffer will be in a conflicting state.
3633 else {
3634 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3635 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3636 }
3637 });
3638}
3639
3640#[gpui::test]
3641async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3642 init_test(cx);
3643
3644 let fs = FakeFs::new(cx.executor());
3645 fs.insert_tree(
3646 path!("/dir"),
3647 json!({
3648 "file1": "the old contents",
3649 }),
3650 )
3651 .await;
3652
3653 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3654 let buffer = project
3655 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3656 .await
3657 .unwrap();
3658 buffer.update(cx, |buffer, cx| {
3659 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3660 });
3661
3662 project
3663 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3664 .await
3665 .unwrap();
3666
3667 let new_text = fs
3668 .load(Path::new(path!("/dir/file1")))
3669 .await
3670 .unwrap()
3671 .replace("\r\n", "\n");
3672 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3673}
3674
3675#[gpui::test]
3676async fn test_save_as(cx: &mut gpui::TestAppContext) {
3677 init_test(cx);
3678
3679 let fs = FakeFs::new(cx.executor());
3680 fs.insert_tree("/dir", json!({})).await;
3681
3682 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3683
3684 let languages = project.update(cx, |project, _| project.languages().clone());
3685 languages.add(rust_lang());
3686
3687 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3688 buffer.update(cx, |buffer, cx| {
3689 buffer.edit([(0..0, "abc")], None, cx);
3690 assert!(buffer.is_dirty());
3691 assert!(!buffer.has_conflict());
3692 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3693 });
3694 project
3695 .update(cx, |project, cx| {
3696 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3697 let path = ProjectPath {
3698 worktree_id,
3699 path: Arc::from(Path::new("file1.rs")),
3700 };
3701 project.save_buffer_as(buffer.clone(), path, cx)
3702 })
3703 .await
3704 .unwrap();
3705 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3706
3707 cx.executor().run_until_parked();
3708 buffer.update(cx, |buffer, cx| {
3709 assert_eq!(
3710 buffer.file().unwrap().full_path(cx),
3711 Path::new("dir/file1.rs")
3712 );
3713 assert!(!buffer.is_dirty());
3714 assert!(!buffer.has_conflict());
3715 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3716 });
3717
3718 let opened_buffer = project
3719 .update(cx, |project, cx| {
3720 project.open_local_buffer("/dir/file1.rs", cx)
3721 })
3722 .await
3723 .unwrap();
3724 assert_eq!(opened_buffer, buffer);
3725}
3726
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    // Real filesystem tree, since this test exercises actual FS events.
    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Opens a buffer for the given worktree-relative path.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Looks up the stable entry id for a worktree-relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree broadcasts, so they can be
    // replayed against the remote copy at the end of the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the renames and the deletion.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids are preserved across renames and moves.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files to the new paths; the deleted file's
    // buffer keeps its old path but reports a deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    // Replay all recorded updates into the remote worktree.
    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3892
3893#[gpui::test(iterations = 10)]
3894async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3895 init_test(cx);
3896
3897 let fs = FakeFs::new(cx.executor());
3898 fs.insert_tree(
3899 path!("/dir"),
3900 json!({
3901 "a": {
3902 "file1": "",
3903 }
3904 }),
3905 )
3906 .await;
3907
3908 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3909 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3910 let tree_id = tree.update(cx, |tree, _| tree.id());
3911
3912 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3913 project.update(cx, |project, cx| {
3914 let tree = project.worktrees(cx).next().unwrap();
3915 tree.read(cx)
3916 .entry_for_path(path)
3917 .unwrap_or_else(|| panic!("no entry for path {}", path))
3918 .id
3919 })
3920 };
3921
3922 let dir_id = id_for_path("a", cx);
3923 let file_id = id_for_path("a/file1", cx);
3924 let buffer = project
3925 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3926 .await
3927 .unwrap();
3928 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3929
3930 project
3931 .update(cx, |project, cx| {
3932 project.rename_entry(dir_id, Path::new("b"), cx)
3933 })
3934 .unwrap()
3935 .await
3936 .to_included()
3937 .unwrap();
3938 cx.executor().run_until_parked();
3939
3940 assert_eq!(id_for_path("b", cx), dir_id);
3941 assert_eq!(id_for_path("b/file1", cx), file_id);
3942 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3943}
3944
3945#[gpui::test]
3946async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3947 init_test(cx);
3948
3949 let fs = FakeFs::new(cx.executor());
3950 fs.insert_tree(
3951 "/dir",
3952 json!({
3953 "a.txt": "a-contents",
3954 "b.txt": "b-contents",
3955 }),
3956 )
3957 .await;
3958
3959 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3960
3961 // Spawn multiple tasks to open paths, repeating some paths.
3962 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3963 (
3964 p.open_local_buffer("/dir/a.txt", cx),
3965 p.open_local_buffer("/dir/b.txt", cx),
3966 p.open_local_buffer("/dir/a.txt", cx),
3967 )
3968 });
3969
3970 let buffer_a_1 = buffer_a_1.await.unwrap();
3971 let buffer_a_2 = buffer_a_2.await.unwrap();
3972 let buffer_b = buffer_b.await.unwrap();
3973 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3974 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3975
3976 // There is only one buffer per path.
3977 let buffer_a_id = buffer_a_1.entity_id();
3978 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3979
3980 // Open the same path again while it is still open.
3981 drop(buffer_a_1);
3982 let buffer_a_3 = project
3983 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3984 .await
3985 .unwrap();
3986
3987 // There's still only one buffer per path.
3988 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3989}
3990
3991#[gpui::test]
3992async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3993 init_test(cx);
3994
3995 let fs = FakeFs::new(cx.executor());
3996 fs.insert_tree(
3997 path!("/dir"),
3998 json!({
3999 "file1": "abc",
4000 "file2": "def",
4001 "file3": "ghi",
4002 }),
4003 )
4004 .await;
4005
4006 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4007
4008 let buffer1 = project
4009 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4010 .await
4011 .unwrap();
4012 let events = Arc::new(Mutex::new(Vec::new()));
4013
4014 // initially, the buffer isn't dirty.
4015 buffer1.update(cx, |buffer, cx| {
4016 cx.subscribe(&buffer1, {
4017 let events = events.clone();
4018 move |_, _, event, _| match event {
4019 BufferEvent::Operation { .. } => {}
4020 _ => events.lock().push(event.clone()),
4021 }
4022 })
4023 .detach();
4024
4025 assert!(!buffer.is_dirty());
4026 assert!(events.lock().is_empty());
4027
4028 buffer.edit([(1..2, "")], None, cx);
4029 });
4030
4031 // after the first edit, the buffer is dirty, and emits a dirtied event.
4032 buffer1.update(cx, |buffer, cx| {
4033 assert!(buffer.text() == "ac");
4034 assert!(buffer.is_dirty());
4035 assert_eq!(
4036 *events.lock(),
4037 &[
4038 language::BufferEvent::Edited,
4039 language::BufferEvent::DirtyChanged
4040 ]
4041 );
4042 events.lock().clear();
4043 buffer.did_save(
4044 buffer.version(),
4045 buffer.file().unwrap().disk_state().mtime(),
4046 cx,
4047 );
4048 });
4049
4050 // after saving, the buffer is not dirty, and emits a saved event.
4051 buffer1.update(cx, |buffer, cx| {
4052 assert!(!buffer.is_dirty());
4053 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4054 events.lock().clear();
4055
4056 buffer.edit([(1..1, "B")], None, cx);
4057 buffer.edit([(2..2, "D")], None, cx);
4058 });
4059
4060 // after editing again, the buffer is dirty, and emits another dirty event.
4061 buffer1.update(cx, |buffer, cx| {
4062 assert!(buffer.text() == "aBDc");
4063 assert!(buffer.is_dirty());
4064 assert_eq!(
4065 *events.lock(),
4066 &[
4067 language::BufferEvent::Edited,
4068 language::BufferEvent::DirtyChanged,
4069 language::BufferEvent::Edited,
4070 ],
4071 );
4072 events.lock().clear();
4073
4074 // After restoring the buffer to its previously-saved state,
4075 // the buffer is not considered dirty anymore.
4076 buffer.edit([(1..3, "")], None, cx);
4077 assert!(buffer.text() == "ac");
4078 assert!(!buffer.is_dirty());
4079 });
4080
4081 assert_eq!(
4082 *events.lock(),
4083 &[
4084 language::BufferEvent::Edited,
4085 language::BufferEvent::DirtyChanged
4086 ]
4087 );
4088
4089 // When a file is deleted, it is not considered dirty.
4090 let events = Arc::new(Mutex::new(Vec::new()));
4091 let buffer2 = project
4092 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4093 .await
4094 .unwrap();
4095 buffer2.update(cx, |_, cx| {
4096 cx.subscribe(&buffer2, {
4097 let events = events.clone();
4098 move |_, _, event, _| match event {
4099 BufferEvent::Operation { .. } => {}
4100 _ => events.lock().push(event.clone()),
4101 }
4102 })
4103 .detach();
4104 });
4105
4106 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4107 .await
4108 .unwrap();
4109 cx.executor().run_until_parked();
4110 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4111 assert_eq!(
4112 mem::take(&mut *events.lock()),
4113 &[language::BufferEvent::FileHandleChanged]
4114 );
4115
4116 // Buffer becomes dirty when edited.
4117 buffer2.update(cx, |buffer, cx| {
4118 buffer.edit([(2..3, "")], None, cx);
4119 assert_eq!(buffer.is_dirty(), true);
4120 });
4121 assert_eq!(
4122 mem::take(&mut *events.lock()),
4123 &[
4124 language::BufferEvent::Edited,
4125 language::BufferEvent::DirtyChanged
4126 ]
4127 );
4128
4129 // Buffer becomes clean again when all of its content is removed, because
4130 // the file was deleted.
4131 buffer2.update(cx, |buffer, cx| {
4132 buffer.edit([(0..2, "")], None, cx);
4133 assert_eq!(buffer.is_empty(), true);
4134 assert_eq!(buffer.is_dirty(), false);
4135 });
4136 assert_eq!(
4137 *events.lock(),
4138 &[
4139 language::BufferEvent::Edited,
4140 language::BufferEvent::DirtyChanged
4141 ]
4142 );
4143
4144 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4145 let events = Arc::new(Mutex::new(Vec::new()));
4146 let buffer3 = project
4147 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4148 .await
4149 .unwrap();
4150 buffer3.update(cx, |_, cx| {
4151 cx.subscribe(&buffer3, {
4152 let events = events.clone();
4153 move |_, _, event, _| match event {
4154 BufferEvent::Operation { .. } => {}
4155 _ => events.lock().push(event.clone()),
4156 }
4157 })
4158 .detach();
4159 });
4160
4161 buffer3.update(cx, |buffer, cx| {
4162 buffer.edit([(0..0, "x")], None, cx);
4163 });
4164 events.lock().clear();
4165 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4166 .await
4167 .unwrap();
4168 cx.executor().run_until_parked();
4169 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4170 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4171}
4172
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The ˇ markers record offsets whose anchors should survive a reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Place an anchor at each marked offset in the original text.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Each anchor should land on the corresponding marked offset in the
        // new text, since the reload is applied as a minimal diff.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4255
4256#[gpui::test]
4257async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4258 init_test(cx);
4259
4260 let fs = FakeFs::new(cx.executor());
4261 fs.insert_tree(
4262 path!("/dir"),
4263 json!({
4264 "file1": "a\nb\nc\n",
4265 "file2": "one\r\ntwo\r\nthree\r\n",
4266 }),
4267 )
4268 .await;
4269
4270 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4271 let buffer1 = project
4272 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4273 .await
4274 .unwrap();
4275 let buffer2 = project
4276 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4277 .await
4278 .unwrap();
4279
4280 buffer1.update(cx, |buffer, _| {
4281 assert_eq!(buffer.text(), "a\nb\nc\n");
4282 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4283 });
4284 buffer2.update(cx, |buffer, _| {
4285 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4286 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4287 });
4288
4289 // Change a file's line endings on disk from unix to windows. The buffer's
4290 // state updates correctly.
4291 fs.save(
4292 path!("/dir/file1").as_ref(),
4293 &"aaa\nb\nc\n".into(),
4294 LineEnding::Windows,
4295 )
4296 .await
4297 .unwrap();
4298 cx.executor().run_until_parked();
4299 buffer1.update(cx, |buffer, _| {
4300 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4301 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4302 });
4303
4304 // Save a file with windows line endings. The file is written correctly.
4305 buffer2.update(cx, |buffer, cx| {
4306 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4307 });
4308 project
4309 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4310 .await
4311 .unwrap();
4312 assert_eq!(
4313 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4314 "one\r\ntwo\r\nthree\r\nfour\r\n",
4315 );
4316}
4317
4318#[gpui::test]
4319async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4320 init_test(cx);
4321
4322 let fs = FakeFs::new(cx.executor());
4323 fs.insert_tree(
4324 path!("/dir"),
4325 json!({
4326 "a.rs": "
4327 fn foo(mut v: Vec<usize>) {
4328 for x in &v {
4329 v.push(1);
4330 }
4331 }
4332 "
4333 .unindent(),
4334 }),
4335 )
4336 .await;
4337
4338 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4339 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
4340 let buffer = project
4341 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
4342 .await
4343 .unwrap();
4344
4345 let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
4346 let message = lsp::PublishDiagnosticsParams {
4347 uri: buffer_uri.clone(),
4348 diagnostics: vec![
4349 lsp::Diagnostic {
4350 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4351 severity: Some(DiagnosticSeverity::WARNING),
4352 message: "error 1".to_string(),
4353 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4354 location: lsp::Location {
4355 uri: buffer_uri.clone(),
4356 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4357 },
4358 message: "error 1 hint 1".to_string(),
4359 }]),
4360 ..Default::default()
4361 },
4362 lsp::Diagnostic {
4363 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4364 severity: Some(DiagnosticSeverity::HINT),
4365 message: "error 1 hint 1".to_string(),
4366 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4367 location: lsp::Location {
4368 uri: buffer_uri.clone(),
4369 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4370 },
4371 message: "original diagnostic".to_string(),
4372 }]),
4373 ..Default::default()
4374 },
4375 lsp::Diagnostic {
4376 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4377 severity: Some(DiagnosticSeverity::ERROR),
4378 message: "error 2".to_string(),
4379 related_information: Some(vec![
4380 lsp::DiagnosticRelatedInformation {
4381 location: lsp::Location {
4382 uri: buffer_uri.clone(),
4383 range: lsp::Range::new(
4384 lsp::Position::new(1, 13),
4385 lsp::Position::new(1, 15),
4386 ),
4387 },
4388 message: "error 2 hint 1".to_string(),
4389 },
4390 lsp::DiagnosticRelatedInformation {
4391 location: lsp::Location {
4392 uri: buffer_uri.clone(),
4393 range: lsp::Range::new(
4394 lsp::Position::new(1, 13),
4395 lsp::Position::new(1, 15),
4396 ),
4397 },
4398 message: "error 2 hint 2".to_string(),
4399 },
4400 ]),
4401 ..Default::default()
4402 },
4403 lsp::Diagnostic {
4404 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4405 severity: Some(DiagnosticSeverity::HINT),
4406 message: "error 2 hint 1".to_string(),
4407 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4408 location: lsp::Location {
4409 uri: buffer_uri.clone(),
4410 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4411 },
4412 message: "original diagnostic".to_string(),
4413 }]),
4414 ..Default::default()
4415 },
4416 lsp::Diagnostic {
4417 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4418 severity: Some(DiagnosticSeverity::HINT),
4419 message: "error 2 hint 2".to_string(),
4420 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4421 location: lsp::Location {
4422 uri: buffer_uri,
4423 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4424 },
4425 message: "original diagnostic".to_string(),
4426 }]),
4427 ..Default::default()
4428 },
4429 ],
4430 version: None,
4431 };
4432
4433 lsp_store
4434 .update(cx, |lsp_store, cx| {
4435 lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
4436 })
4437 .unwrap();
4438 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
4439
4440 assert_eq!(
4441 buffer
4442 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
4443 .collect::<Vec<_>>(),
4444 &[
4445 DiagnosticEntry {
4446 range: Point::new(1, 8)..Point::new(1, 9),
4447 diagnostic: Diagnostic {
4448 severity: DiagnosticSeverity::WARNING,
4449 message: "error 1".to_string(),
4450 group_id: 1,
4451 is_primary: true,
4452 ..Default::default()
4453 }
4454 },
4455 DiagnosticEntry {
4456 range: Point::new(1, 8)..Point::new(1, 9),
4457 diagnostic: Diagnostic {
4458 severity: DiagnosticSeverity::HINT,
4459 message: "error 1 hint 1".to_string(),
4460 group_id: 1,
4461 is_primary: false,
4462 ..Default::default()
4463 }
4464 },
4465 DiagnosticEntry {
4466 range: Point::new(1, 13)..Point::new(1, 15),
4467 diagnostic: Diagnostic {
4468 severity: DiagnosticSeverity::HINT,
4469 message: "error 2 hint 1".to_string(),
4470 group_id: 0,
4471 is_primary: false,
4472 ..Default::default()
4473 }
4474 },
4475 DiagnosticEntry {
4476 range: Point::new(1, 13)..Point::new(1, 15),
4477 diagnostic: Diagnostic {
4478 severity: DiagnosticSeverity::HINT,
4479 message: "error 2 hint 2".to_string(),
4480 group_id: 0,
4481 is_primary: false,
4482 ..Default::default()
4483 }
4484 },
4485 DiagnosticEntry {
4486 range: Point::new(2, 8)..Point::new(2, 17),
4487 diagnostic: Diagnostic {
4488 severity: DiagnosticSeverity::ERROR,
4489 message: "error 2".to_string(),
4490 group_id: 0,
4491 is_primary: true,
4492 ..Default::default()
4493 }
4494 }
4495 ]
4496 );
4497
4498 assert_eq!(
4499 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4500 &[
4501 DiagnosticEntry {
4502 range: Point::new(1, 13)..Point::new(1, 15),
4503 diagnostic: Diagnostic {
4504 severity: DiagnosticSeverity::HINT,
4505 message: "error 2 hint 1".to_string(),
4506 group_id: 0,
4507 is_primary: false,
4508 ..Default::default()
4509 }
4510 },
4511 DiagnosticEntry {
4512 range: Point::new(1, 13)..Point::new(1, 15),
4513 diagnostic: Diagnostic {
4514 severity: DiagnosticSeverity::HINT,
4515 message: "error 2 hint 2".to_string(),
4516 group_id: 0,
4517 is_primary: false,
4518 ..Default::default()
4519 }
4520 },
4521 DiagnosticEntry {
4522 range: Point::new(2, 8)..Point::new(2, 17),
4523 diagnostic: Diagnostic {
4524 severity: DiagnosticSeverity::ERROR,
4525 message: "error 2".to_string(),
4526 group_id: 0,
4527 is_primary: true,
4528 ..Default::default()
4529 }
4530 }
4531 ]
4532 );
4533
4534 assert_eq!(
4535 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4536 &[
4537 DiagnosticEntry {
4538 range: Point::new(1, 8)..Point::new(1, 9),
4539 diagnostic: Diagnostic {
4540 severity: DiagnosticSeverity::WARNING,
4541 message: "error 1".to_string(),
4542 group_id: 1,
4543 is_primary: true,
4544 ..Default::default()
4545 }
4546 },
4547 DiagnosticEntry {
4548 range: Point::new(1, 8)..Point::new(1, 9),
4549 diagnostic: Diagnostic {
4550 severity: DiagnosticSeverity::HINT,
4551 message: "error 1 hint 1".to_string(),
4552 group_id: 1,
4553 is_primary: false,
4554 ..Default::default()
4555 }
4556 },
4557 ]
4558 );
4559}
4560
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Renaming a worktree entry should drive the LSP file-operation
    // protocol: a `workspace/willRenameFiles` request before the rename
    // (whose returned workspace edit is applied) and a
    // `workspace/didRenameFiles` notification afterwards.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server registers interest in renames of Rust files and of any
    // folder; the rename below (a .rs file) matches the first filter.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the `willRenameFiles` request it issues is
    // observed by the handler installed below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit returned from `willRenameFiles`. It targets an unrelated
    // file (two/two.rs), so applying it is independent of the rename itself.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    // Record that the request was actually received so the
                    // final assertion can check it.
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must be notified via
    // `didRenameFiles` with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4689
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Symbol rename via LSP: `prepare_rename` yields the renameable range
    // around the cursor, then `perform_rename` applies the server's
    // multi-file workspace edit and returns the affected buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // Advertise rename support with `prepareProvider` enabled.
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server reports the
    // renameable range as columns 6..9, i.e. the whole identifier.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server's workspace edit touches both one.rs
    // (the definition) and two.rs (two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The returned transaction should contain both edited buffers with the
    // rename applied to their text.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4829
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Project-wide text search for "TWO": first against the files on disk,
    // then again after editing an open buffer, to check that the results
    // reflect unsaved in-memory buffer contents.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so that it now references
    // `two::TWO` twice.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same search now also matches the dirty buffer's contents.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40]),
            (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
4906
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Search restricted by inclusion path matchers: globs that match
    // nothing yield no results, and matching globs restrict results to
    // exactly the matched files (non-matching globs in the same matcher
    // are simply inert).
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.ts").to_string(), vec![14..18]),
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5026
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Search restricted by exclusion path matchers: non-matching globs
    // change nothing, matching globs remove exactly the matched files, and
    // excluding everything yields no results.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5146
5147#[gpui::test]
5148async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5149 init_test(cx);
5150
5151 let search_query = "file";
5152
5153 let fs = FakeFs::new(cx.executor());
5154 fs.insert_tree(
5155 path!("/dir"),
5156 json!({
5157 "one.rs": r#"// Rust file one"#,
5158 "one.ts": r#"// TypeScript file one"#,
5159 "two.rs": r#"// Rust file two"#,
5160 "two.ts": r#"// TypeScript file two"#,
5161 }),
5162 )
5163 .await;
5164 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5165
5166 assert!(
5167 search(
5168 &project,
5169 SearchQuery::text(
5170 search_query,
5171 false,
5172 true,
5173 false,
5174 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5175 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5176 false,
5177 None,
5178 )
5179 .unwrap(),
5180 cx
5181 )
5182 .await
5183 .unwrap()
5184 .is_empty(),
5185 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5186 );
5187
5188 assert!(
5189 search(
5190 &project,
5191 SearchQuery::text(
5192 search_query,
5193 false,
5194 true,
5195 false,
5196 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5197 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5198 false,
5199 None,
5200 )
5201 .unwrap(),
5202 cx
5203 )
5204 .await
5205 .unwrap()
5206 .is_empty(),
5207 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5208 );
5209
5210 assert!(
5211 search(
5212 &project,
5213 SearchQuery::text(
5214 search_query,
5215 false,
5216 true,
5217 false,
5218 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5219 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5220 false,
5221 None,
5222 )
5223 .unwrap(),
5224 cx
5225 )
5226 .await
5227 .unwrap()
5228 .is_empty(),
5229 "Non-matching inclusions and exclusions should not change that."
5230 );
5231
5232 assert_eq!(
5233 search(
5234 &project,
5235 SearchQuery::text(
5236 search_query,
5237 false,
5238 true,
5239 false,
5240 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5241 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5242 false,
5243 None,
5244 )
5245 .unwrap(),
5246 cx
5247 )
5248 .await
5249 .unwrap(),
5250 HashMap::from_iter([
5251 (separator!("dir/one.ts").to_string(), vec![14..18]),
5252 (separator!("dir/two.ts").to_string(), vec![14..18]),
5253 ]),
5254 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5255 );
5256}
5257
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // With two worktrees open, inclusion globs prefixed by a worktree root
    // ("worktree-a/*.rs") scope results to that worktree, while un-prefixed
    // globs ("*.ts") apply across all worktrees.
    // NOTE(review): the two worktree-scoped queries pass `true` for the
    // seventh `SearchQuery::text` argument and the cross-worktree query
    // passes `false` — presumably it controls whether globs are matched
    // against root-qualified paths; confirm against the `SearchQuery::text`
    // signature.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
5355
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Search behavior around gitignored directories ("target" and
    // "node_modules"): by default ignored files are skipped; when the
    // fourth `SearchQuery::text` argument is `true`, ignored files are
    // searched as well, and inclusion/exclusion matchers still apply to
    // them.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: ignored directories are not scanned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is created for each of the following
    // searches — presumably to avoid reusing worktree scan state from the
    // previous query; confirm whether this is required.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion/exclusion matchers apply to ignored files as well.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5478
5479#[gpui::test]
5480async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
5481 init_test(cx);
5482
5483 let fs = FakeFs::new(cx.executor());
5484 fs.insert_tree(
5485 path!("/dir"),
5486 json!({
5487 "one.rs": "// ПРИВЕТ? привет!",
5488 "two.rs": "// ПРИВЕТ.",
5489 "three.rs": "// привет",
5490 }),
5491 )
5492 .await;
5493 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5494
5495 let unicode_case_sensitive_query = SearchQuery::text(
5496 "привет",
5497 false,
5498 true,
5499 false,
5500 Default::default(),
5501 Default::default(),
5502 false,
5503 None,
5504 );
5505 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
5506 assert_eq!(
5507 search(&project, unicode_case_sensitive_query.unwrap(), cx)
5508 .await
5509 .unwrap(),
5510 HashMap::from_iter([
5511 (separator!("dir/one.rs").to_string(), vec![17..29]),
5512 (separator!("dir/three.rs").to_string(), vec![3..15]),
5513 ])
5514 );
5515
5516 let unicode_case_insensitive_query = SearchQuery::text(
5517 "привет",
5518 false,
5519 false,
5520 false,
5521 Default::default(),
5522 Default::default(),
5523 false,
5524 None,
5525 );
5526 assert_matches!(
5527 unicode_case_insensitive_query,
5528 Ok(SearchQuery::Regex { .. })
5529 );
5530 assert_eq!(
5531 search(&project, unicode_case_insensitive_query.unwrap(), cx)
5532 .await
5533 .unwrap(),
5534 HashMap::from_iter([
5535 (separator!("dir/one.rs").to_string(), vec![3..15, 17..29]),
5536 (separator!("dir/two.rs").to_string(), vec![3..15]),
5537 (separator!("dir/three.rs").to_string(), vec![3..15]),
5538 ])
5539 );
5540
5541 assert_eq!(
5542 search(
5543 &project,
5544 SearchQuery::text(
5545 "привет.",
5546 false,
5547 false,
5548 false,
5549 Default::default(),
5550 Default::default(),
5551 false,
5552 None,
5553 )
5554 .unwrap(),
5555 cx
5556 )
5557 .await
5558 .unwrap(),
5559 HashMap::from_iter([(separator!("dir/two.rs").to_string(), vec![3..16]),])
5560 );
5561}
5562
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // Creating worktree entries: an unusual-but-valid name like "b.." is
    // accepted, while paths that escape the worktree or contain ".."
    // components are rejected — both for entry creation and for opening
    // buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The project root is the nested "three" directory.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only "b.." was created; the rejected paths left no trace on disk.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
5632
5633#[gpui::test]
5634async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
5635 init_test(cx);
5636
5637 let fs = FakeFs::new(cx.executor());
5638 fs.insert_tree(
5639 path!("/dir"),
5640 json!({
5641 "a.tsx": "a",
5642 }),
5643 )
5644 .await;
5645
5646 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5647
5648 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5649 language_registry.add(tsx_lang());
5650 let language_server_names = [
5651 "TypeScriptServer",
5652 "TailwindServer",
5653 "ESLintServer",
5654 "NoHoverCapabilitiesServer",
5655 ];
5656 let mut language_servers = [
5657 language_registry.register_fake_lsp(
5658 "tsx",
5659 FakeLspAdapter {
5660 name: language_server_names[0],
5661 capabilities: lsp::ServerCapabilities {
5662 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5663 ..lsp::ServerCapabilities::default()
5664 },
5665 ..FakeLspAdapter::default()
5666 },
5667 ),
5668 language_registry.register_fake_lsp(
5669 "tsx",
5670 FakeLspAdapter {
5671 name: language_server_names[1],
5672 capabilities: lsp::ServerCapabilities {
5673 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5674 ..lsp::ServerCapabilities::default()
5675 },
5676 ..FakeLspAdapter::default()
5677 },
5678 ),
5679 language_registry.register_fake_lsp(
5680 "tsx",
5681 FakeLspAdapter {
5682 name: language_server_names[2],
5683 capabilities: lsp::ServerCapabilities {
5684 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5685 ..lsp::ServerCapabilities::default()
5686 },
5687 ..FakeLspAdapter::default()
5688 },
5689 ),
5690 language_registry.register_fake_lsp(
5691 "tsx",
5692 FakeLspAdapter {
5693 name: language_server_names[3],
5694 capabilities: lsp::ServerCapabilities {
5695 hover_provider: None,
5696 ..lsp::ServerCapabilities::default()
5697 },
5698 ..FakeLspAdapter::default()
5699 },
5700 ),
5701 ];
5702
5703 let (buffer, _handle) = project
5704 .update(cx, |p, cx| {
5705 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5706 })
5707 .await
5708 .unwrap();
5709 cx.executor().run_until_parked();
5710
5711 let mut servers_with_hover_requests = HashMap::default();
5712 for i in 0..language_server_names.len() {
5713 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
5714 panic!(
5715 "Failed to get language server #{i} with name {}",
5716 &language_server_names[i]
5717 )
5718 });
5719 let new_server_name = new_server.server.name();
5720 assert!(
5721 !servers_with_hover_requests.contains_key(&new_server_name),
5722 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5723 );
5724 match new_server_name.as_ref() {
5725 "TailwindServer" | "TypeScriptServer" => {
5726 servers_with_hover_requests.insert(
5727 new_server_name.clone(),
5728 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5729 move |_, _| {
5730 let name = new_server_name.clone();
5731 async move {
5732 Ok(Some(lsp::Hover {
5733 contents: lsp::HoverContents::Scalar(
5734 lsp::MarkedString::String(format!("{name} hover")),
5735 ),
5736 range: None,
5737 }))
5738 }
5739 },
5740 ),
5741 );
5742 }
5743 "ESLintServer" => {
5744 servers_with_hover_requests.insert(
5745 new_server_name,
5746 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5747 |_, _| async move { Ok(None) },
5748 ),
5749 );
5750 }
5751 "NoHoverCapabilitiesServer" => {
5752 let _never_handled = new_server
5753 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
5754 panic!(
5755 "Should not call for hovers server with no corresponding capabilities"
5756 )
5757 });
5758 }
5759 unexpected => panic!("Unexpected server name: {unexpected}"),
5760 }
5761 }
5762
5763 let hover_task = project.update(cx, |project, cx| {
5764 project.hover(&buffer, Point::new(0, 0), cx)
5765 });
5766 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
5767 |mut hover_request| async move {
5768 hover_request
5769 .next()
5770 .await
5771 .expect("All hover requests should have been triggered")
5772 },
5773 ))
5774 .await;
5775 assert_eq!(
5776 vec!["TailwindServer hover", "TypeScriptServer hover"],
5777 hover_task
5778 .await
5779 .into_iter()
5780 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5781 .sorted()
5782 .collect::<Vec<_>>(),
5783 "Should receive hover responses from all related servers with hover capabilities"
5784 );
5785}
5786
5787#[gpui::test]
5788async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5789 init_test(cx);
5790
5791 let fs = FakeFs::new(cx.executor());
5792 fs.insert_tree(
5793 path!("/dir"),
5794 json!({
5795 "a.ts": "a",
5796 }),
5797 )
5798 .await;
5799
5800 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5801
5802 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5803 language_registry.add(typescript_lang());
5804 let mut fake_language_servers = language_registry.register_fake_lsp(
5805 "TypeScript",
5806 FakeLspAdapter {
5807 capabilities: lsp::ServerCapabilities {
5808 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5809 ..lsp::ServerCapabilities::default()
5810 },
5811 ..FakeLspAdapter::default()
5812 },
5813 );
5814
5815 let (buffer, _handle) = project
5816 .update(cx, |p, cx| {
5817 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5818 })
5819 .await
5820 .unwrap();
5821 cx.executor().run_until_parked();
5822
5823 let fake_server = fake_language_servers
5824 .next()
5825 .await
5826 .expect("failed to get the language server");
5827
5828 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5829 move |_, _| async move {
5830 Ok(Some(lsp::Hover {
5831 contents: lsp::HoverContents::Array(vec![
5832 lsp::MarkedString::String("".to_string()),
5833 lsp::MarkedString::String(" ".to_string()),
5834 lsp::MarkedString::String("\n\n\n".to_string()),
5835 ]),
5836 range: None,
5837 }))
5838 },
5839 );
5840
5841 let hover_task = project.update(cx, |project, cx| {
5842 project.hover(&buffer, Point::new(0, 0), cx)
5843 });
5844 let () = request_handled
5845 .next()
5846 .await
5847 .expect("All hover requests should have been triggered");
5848 assert_eq!(
5849 Vec::<String>::new(),
5850 hover_task
5851 .await
5852 .into_iter()
5853 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5854 .sorted()
5855 .collect::<Vec<_>>(),
5856 "Empty hover parts should be ignored"
5857 );
5858}
5859
5860#[gpui::test]
5861async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5862 init_test(cx);
5863
5864 let fs = FakeFs::new(cx.executor());
5865 fs.insert_tree(
5866 path!("/dir"),
5867 json!({
5868 "a.ts": "a",
5869 }),
5870 )
5871 .await;
5872
5873 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5874
5875 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5876 language_registry.add(typescript_lang());
5877 let mut fake_language_servers = language_registry.register_fake_lsp(
5878 "TypeScript",
5879 FakeLspAdapter {
5880 capabilities: lsp::ServerCapabilities {
5881 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5882 ..lsp::ServerCapabilities::default()
5883 },
5884 ..FakeLspAdapter::default()
5885 },
5886 );
5887
5888 let (buffer, _handle) = project
5889 .update(cx, |p, cx| {
5890 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5891 })
5892 .await
5893 .unwrap();
5894 cx.executor().run_until_parked();
5895
5896 let fake_server = fake_language_servers
5897 .next()
5898 .await
5899 .expect("failed to get the language server");
5900
5901 let mut request_handled = fake_server
5902 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
5903 Ok(Some(vec![
5904 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5905 title: "organize imports".to_string(),
5906 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5907 ..lsp::CodeAction::default()
5908 }),
5909 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5910 title: "fix code".to_string(),
5911 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5912 ..lsp::CodeAction::default()
5913 }),
5914 ]))
5915 });
5916
5917 let code_actions_task = project.update(cx, |project, cx| {
5918 project.code_actions(
5919 &buffer,
5920 0..buffer.read(cx).len(),
5921 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5922 cx,
5923 )
5924 });
5925
5926 let () = request_handled
5927 .next()
5928 .await
5929 .expect("The code action request should have been triggered");
5930
5931 let code_actions = code_actions_task.await.unwrap();
5932 assert_eq!(code_actions.len(), 1);
5933 assert_eq!(
5934 code_actions[0].lsp_action.action_kind(),
5935 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5936 );
5937}
5938
5939#[gpui::test]
5940async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5941 init_test(cx);
5942
5943 let fs = FakeFs::new(cx.executor());
5944 fs.insert_tree(
5945 path!("/dir"),
5946 json!({
5947 "a.tsx": "a",
5948 }),
5949 )
5950 .await;
5951
5952 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5953
5954 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5955 language_registry.add(tsx_lang());
5956 let language_server_names = [
5957 "TypeScriptServer",
5958 "TailwindServer",
5959 "ESLintServer",
5960 "NoActionsCapabilitiesServer",
5961 ];
5962
5963 let mut language_server_rxs = [
5964 language_registry.register_fake_lsp(
5965 "tsx",
5966 FakeLspAdapter {
5967 name: language_server_names[0],
5968 capabilities: lsp::ServerCapabilities {
5969 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5970 ..lsp::ServerCapabilities::default()
5971 },
5972 ..FakeLspAdapter::default()
5973 },
5974 ),
5975 language_registry.register_fake_lsp(
5976 "tsx",
5977 FakeLspAdapter {
5978 name: language_server_names[1],
5979 capabilities: lsp::ServerCapabilities {
5980 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5981 ..lsp::ServerCapabilities::default()
5982 },
5983 ..FakeLspAdapter::default()
5984 },
5985 ),
5986 language_registry.register_fake_lsp(
5987 "tsx",
5988 FakeLspAdapter {
5989 name: language_server_names[2],
5990 capabilities: lsp::ServerCapabilities {
5991 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5992 ..lsp::ServerCapabilities::default()
5993 },
5994 ..FakeLspAdapter::default()
5995 },
5996 ),
5997 language_registry.register_fake_lsp(
5998 "tsx",
5999 FakeLspAdapter {
6000 name: language_server_names[3],
6001 capabilities: lsp::ServerCapabilities {
6002 code_action_provider: None,
6003 ..lsp::ServerCapabilities::default()
6004 },
6005 ..FakeLspAdapter::default()
6006 },
6007 ),
6008 ];
6009
6010 let (buffer, _handle) = project
6011 .update(cx, |p, cx| {
6012 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6013 })
6014 .await
6015 .unwrap();
6016 cx.executor().run_until_parked();
6017
6018 let mut servers_with_actions_requests = HashMap::default();
6019 for i in 0..language_server_names.len() {
6020 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6021 panic!(
6022 "Failed to get language server #{i} with name {}",
6023 &language_server_names[i]
6024 )
6025 });
6026 let new_server_name = new_server.server.name();
6027
6028 assert!(
6029 !servers_with_actions_requests.contains_key(&new_server_name),
6030 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6031 );
6032 match new_server_name.0.as_ref() {
6033 "TailwindServer" | "TypeScriptServer" => {
6034 servers_with_actions_requests.insert(
6035 new_server_name.clone(),
6036 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6037 move |_, _| {
6038 let name = new_server_name.clone();
6039 async move {
6040 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6041 lsp::CodeAction {
6042 title: format!("{name} code action"),
6043 ..lsp::CodeAction::default()
6044 },
6045 )]))
6046 }
6047 },
6048 ),
6049 );
6050 }
6051 "ESLintServer" => {
6052 servers_with_actions_requests.insert(
6053 new_server_name,
6054 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6055 |_, _| async move { Ok(None) },
6056 ),
6057 );
6058 }
6059 "NoActionsCapabilitiesServer" => {
6060 let _never_handled = new_server
6061 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6062 panic!(
6063 "Should not call for code actions server with no corresponding capabilities"
6064 )
6065 });
6066 }
6067 unexpected => panic!("Unexpected server name: {unexpected}"),
6068 }
6069 }
6070
6071 let code_actions_task = project.update(cx, |project, cx| {
6072 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6073 });
6074
6075 // cx.run_until_parked();
6076 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6077 |mut code_actions_request| async move {
6078 code_actions_request
6079 .next()
6080 .await
6081 .expect("All code actions requests should have been triggered")
6082 },
6083 ))
6084 .await;
6085 assert_eq!(
6086 vec!["TailwindServer code action", "TypeScriptServer code action"],
6087 code_actions_task
6088 .await
6089 .unwrap()
6090 .into_iter()
6091 .map(|code_action| code_action.lsp_action.title().to_owned())
6092 .sorted()
6093 .collect::<Vec<_>>(),
6094 "Should receive code actions responses from all related servers with hover capabilities"
6095 );
6096}
6097
6098#[gpui::test]
6099async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6100 init_test(cx);
6101
6102 let fs = FakeFs::new(cx.executor());
6103 fs.insert_tree(
6104 "/dir",
6105 json!({
6106 "a.rs": "let a = 1;",
6107 "b.rs": "let b = 2;",
6108 "c.rs": "let c = 2;",
6109 }),
6110 )
6111 .await;
6112
6113 let project = Project::test(
6114 fs,
6115 [
6116 "/dir/a.rs".as_ref(),
6117 "/dir/b.rs".as_ref(),
6118 "/dir/c.rs".as_ref(),
6119 ],
6120 cx,
6121 )
6122 .await;
6123
6124 // check the initial state and get the worktrees
6125 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6126 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6127 assert_eq!(worktrees.len(), 3);
6128
6129 let worktree_a = worktrees[0].read(cx);
6130 let worktree_b = worktrees[1].read(cx);
6131 let worktree_c = worktrees[2].read(cx);
6132
6133 // check they start in the right order
6134 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6135 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6136 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6137
6138 (
6139 worktrees[0].clone(),
6140 worktrees[1].clone(),
6141 worktrees[2].clone(),
6142 )
6143 });
6144
6145 // move first worktree to after the second
6146 // [a, b, c] -> [b, a, c]
6147 project
6148 .update(cx, |project, cx| {
6149 let first = worktree_a.read(cx);
6150 let second = worktree_b.read(cx);
6151 project.move_worktree(first.id(), second.id(), cx)
6152 })
6153 .expect("moving first after second");
6154
6155 // check the state after moving
6156 project.update(cx, |project, cx| {
6157 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6158 assert_eq!(worktrees.len(), 3);
6159
6160 let first = worktrees[0].read(cx);
6161 let second = worktrees[1].read(cx);
6162 let third = worktrees[2].read(cx);
6163
6164 // check they are now in the right order
6165 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6166 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6167 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6168 });
6169
6170 // move the second worktree to before the first
6171 // [b, a, c] -> [a, b, c]
6172 project
6173 .update(cx, |project, cx| {
6174 let second = worktree_a.read(cx);
6175 let first = worktree_b.read(cx);
6176 project.move_worktree(first.id(), second.id(), cx)
6177 })
6178 .expect("moving second before first");
6179
6180 // check the state after moving
6181 project.update(cx, |project, cx| {
6182 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6183 assert_eq!(worktrees.len(), 3);
6184
6185 let first = worktrees[0].read(cx);
6186 let second = worktrees[1].read(cx);
6187 let third = worktrees[2].read(cx);
6188
6189 // check they are now in the right order
6190 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6191 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6192 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6193 });
6194
6195 // move the second worktree to after the third
6196 // [a, b, c] -> [a, c, b]
6197 project
6198 .update(cx, |project, cx| {
6199 let second = worktree_b.read(cx);
6200 let third = worktree_c.read(cx);
6201 project.move_worktree(second.id(), third.id(), cx)
6202 })
6203 .expect("moving second after third");
6204
6205 // check the state after moving
6206 project.update(cx, |project, cx| {
6207 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6208 assert_eq!(worktrees.len(), 3);
6209
6210 let first = worktrees[0].read(cx);
6211 let second = worktrees[1].read(cx);
6212 let third = worktrees[2].read(cx);
6213
6214 // check they are now in the right order
6215 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6216 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6217 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6218 });
6219
6220 // move the third worktree to before the second
6221 // [a, c, b] -> [a, b, c]
6222 project
6223 .update(cx, |project, cx| {
6224 let third = worktree_c.read(cx);
6225 let second = worktree_b.read(cx);
6226 project.move_worktree(third.id(), second.id(), cx)
6227 })
6228 .expect("moving third before second");
6229
6230 // check the state after moving
6231 project.update(cx, |project, cx| {
6232 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6233 assert_eq!(worktrees.len(), 3);
6234
6235 let first = worktrees[0].read(cx);
6236 let second = worktrees[1].read(cx);
6237 let third = worktrees[2].read(cx);
6238
6239 // check they are now in the right order
6240 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6241 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6242 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6243 });
6244
6245 // move the first worktree to after the third
6246 // [a, b, c] -> [b, c, a]
6247 project
6248 .update(cx, |project, cx| {
6249 let first = worktree_a.read(cx);
6250 let third = worktree_c.read(cx);
6251 project.move_worktree(first.id(), third.id(), cx)
6252 })
6253 .expect("moving first after third");
6254
6255 // check the state after moving
6256 project.update(cx, |project, cx| {
6257 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6258 assert_eq!(worktrees.len(), 3);
6259
6260 let first = worktrees[0].read(cx);
6261 let second = worktrees[1].read(cx);
6262 let third = worktrees[2].read(cx);
6263
6264 // check they are now in the right order
6265 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6266 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6267 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6268 });
6269
6270 // move the third worktree to before the first
6271 // [b, c, a] -> [a, b, c]
6272 project
6273 .update(cx, |project, cx| {
6274 let third = worktree_a.read(cx);
6275 let first = worktree_b.read(cx);
6276 project.move_worktree(third.id(), first.id(), cx)
6277 })
6278 .expect("moving third before first");
6279
6280 // check the state after moving
6281 project.update(cx, |project, cx| {
6282 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6283 assert_eq!(worktrees.len(), 3);
6284
6285 let first = worktrees[0].read(cx);
6286 let second = worktrees[1].read(cx);
6287 let third = worktrees[2].read(cx);
6288
6289 // check they are now in the right order
6290 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6291 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6292 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6293 });
6294}
6295
// Verifies the unstaged diff (working copy vs. git index): the initial hunks
// reflect edits relative to the index, and the diff is recomputed when the
// index contents change.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index (staged) version of the file.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working copy: one added comment line plus a modified println line.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the initial diff computation settle before inspecting hunks.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        // Expect one added hunk (the comment) and one modified hunk (println).
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Update the index so that only the println line now differs from the
    // working copy; the diff should shrink to a single added hunk.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6393
// Verifies the uncommitted diff (working copy vs. HEAD): hunk secondary
// statuses track whether the change is also present in the index, the diff
// updates when HEAD moves, and a deleted file shows a deletion hunk whose
// secondary status clears once the deletion is staged.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD version of the file.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index version: the println change is already staged.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working copy: adds a comment on top of the staged println change.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index also contain deletion.rs, which does not exist on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language (Rust).
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        // The added comment is unstaged (HasSecondaryHunk); the println
        // change is already staged, so it has no secondary hunk.
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows up as one deletion hunk; the deletion is not yet
    // staged, so the hunk still has a secondary (index) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the deletion is staged, the secondary hunk disappears.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6571
// Exercises staging individual hunks through the uncommitted diff:
// optimistic "pending" secondary statuses, the events emitted while staging,
// rollback when the index write fails, and two staging operations in flight
// at once.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD contents; the working copy deletes "zero" and upcases two lines,
    // yielding three hunks: one deletion and two modifications.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so every hunk begins unstaged.
    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so each stage/unstage emission can be checked.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The index write is async, so the hunk shows a pending removal of
        // its secondary hunk rather than jumping straight to staged.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // Optimistic pending state again, before the (failing) write resolves.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback is announced as a diff change spanning the whole file.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6911
6912#[gpui::test(seeds(340, 472))]
6913async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
6914 use DiffHunkSecondaryStatus::*;
6915 init_test(cx);
6916
6917 let committed_contents = r#"
6918 zero
6919 one
6920 two
6921 three
6922 four
6923 five
6924 "#
6925 .unindent();
6926 let file_contents = r#"
6927 one
6928 TWO
6929 three
6930 FOUR
6931 five
6932 "#
6933 .unindent();
6934
6935 let fs = FakeFs::new(cx.background_executor.clone());
6936 fs.insert_tree(
6937 "/dir",
6938 json!({
6939 ".git": {},
6940 "file.txt": file_contents.clone()
6941 }),
6942 )
6943 .await;
6944
6945 fs.set_head_for_repo(
6946 "/dir/.git".as_ref(),
6947 &[("file.txt".into(), committed_contents.clone())],
6948 );
6949 fs.set_index_for_repo(
6950 "/dir/.git".as_ref(),
6951 &[("file.txt".into(), committed_contents.clone())],
6952 );
6953
6954 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6955
6956 let buffer = project
6957 .update(cx, |project, cx| {
6958 project.open_local_buffer("/dir/file.txt", cx)
6959 })
6960 .await
6961 .unwrap();
6962 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6963 let uncommitted_diff = project
6964 .update(cx, |project, cx| {
6965 project.open_uncommitted_diff(buffer.clone(), cx)
6966 })
6967 .await
6968 .unwrap();
6969
6970 // The hunks are initially unstaged.
6971 uncommitted_diff.read_with(cx, |diff, cx| {
6972 assert_hunks(
6973 diff.hunks(&snapshot, cx),
6974 &snapshot,
6975 &diff.base_text_string().unwrap(),
6976 &[
6977 (
6978 0..0,
6979 "zero\n",
6980 "",
6981 DiffHunkStatus::deleted(HasSecondaryHunk),
6982 ),
6983 (
6984 1..2,
6985 "two\n",
6986 "TWO\n",
6987 DiffHunkStatus::modified(HasSecondaryHunk),
6988 ),
6989 (
6990 3..4,
6991 "four\n",
6992 "FOUR\n",
6993 DiffHunkStatus::modified(HasSecondaryHunk),
6994 ),
6995 ],
6996 );
6997 });
6998
6999 // Pause IO events
7000 fs.pause_events();
7001
7002 // Stage the first hunk.
7003 uncommitted_diff.update(cx, |diff, cx| {
7004 let hunk = diff.hunks(&snapshot, cx).next().unwrap();
7005 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
7006 assert_hunks(
7007 diff.hunks(&snapshot, cx),
7008 &snapshot,
7009 &diff.base_text_string().unwrap(),
7010 &[
7011 (
7012 0..0,
7013 "zero\n",
7014 "",
7015 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7016 ),
7017 (
7018 1..2,
7019 "two\n",
7020 "TWO\n",
7021 DiffHunkStatus::modified(HasSecondaryHunk),
7022 ),
7023 (
7024 3..4,
7025 "four\n",
7026 "FOUR\n",
7027 DiffHunkStatus::modified(HasSecondaryHunk),
7028 ),
7029 ],
7030 );
7031 });
7032
7033 // Stage the second hunk *before* receiving the FS event for the first hunk.
7034 cx.run_until_parked();
7035 uncommitted_diff.update(cx, |diff, cx| {
7036 let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
7037 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
7038 assert_hunks(
7039 diff.hunks(&snapshot, cx),
7040 &snapshot,
7041 &diff.base_text_string().unwrap(),
7042 &[
7043 (
7044 0..0,
7045 "zero\n",
7046 "",
7047 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7048 ),
7049 (
7050 1..2,
7051 "two\n",
7052 "TWO\n",
7053 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7054 ),
7055 (
7056 3..4,
7057 "four\n",
7058 "FOUR\n",
7059 DiffHunkStatus::modified(HasSecondaryHunk),
7060 ),
7061 ],
7062 );
7063 });
7064
7065 // Process the FS event for staging the first hunk (second event is still pending).
7066 fs.flush_events(1);
7067 cx.run_until_parked();
7068
7069 // Stage the third hunk before receiving the second FS event.
7070 uncommitted_diff.update(cx, |diff, cx| {
7071 let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
7072 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
7073 });
7074
7075 // Wait for all remaining IO.
7076 cx.run_until_parked();
7077 fs.flush_events(fs.buffered_event_count());
7078
7079 // Now all hunks are staged.
7080 cx.run_until_parked();
7081 uncommitted_diff.update(cx, |diff, cx| {
7082 assert_hunks(
7083 diff.hunks(&snapshot, cx),
7084 &snapshot,
7085 &diff.base_text_string().unwrap(),
7086 &[
7087 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
7088 (
7089 1..2,
7090 "two\n",
7091 "TWO\n",
7092 DiffHunkStatus::modified(NoSecondaryHunk),
7093 ),
7094 (
7095 3..4,
7096 "four\n",
7097 "FOUR\n",
7098 DiffHunkStatus::modified(NoSecondaryHunk),
7099 ),
7100 ],
7101 );
7102 });
7103}
7104
// Fuzz test: randomly stages and unstages hunks (sometimes deprioritizing
// diff recalculation to provoke races with git index writes), then verifies
// that every hunk's secondary (staged) status settles to the expected final
// state once all pending work has completed.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; override via `OPERATIONS`.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.gen_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line of the buffer is modified relative to HEAD/index,
    // producing six well-separated hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of the expected state of each hunk.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.gen_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk's staged state and record in the model the pending
        // status the diff is expected to report until the write lands.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Occasionally yield so index writes and diff recalculations can
        // interleave with subsequent operations.
        for _ in 0..rng.gen_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything has settled, each pending status in the model should
    // have resolved to the corresponding final status.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    // The diff's reported hunk statuses must match the model.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7222
7223#[gpui::test]
7224async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7225 init_test(cx);
7226
7227 let committed_contents = r#"
7228 fn main() {
7229 println!("hello from HEAD");
7230 }
7231 "#
7232 .unindent();
7233 let file_contents = r#"
7234 fn main() {
7235 println!("hello from the working copy");
7236 }
7237 "#
7238 .unindent();
7239
7240 let fs = FakeFs::new(cx.background_executor.clone());
7241 fs.insert_tree(
7242 "/dir",
7243 json!({
7244 ".git": {},
7245 "src": {
7246 "main.rs": file_contents,
7247 }
7248 }),
7249 )
7250 .await;
7251
7252 fs.set_head_for_repo(
7253 Path::new("/dir/.git"),
7254 &[("src/main.rs".into(), committed_contents.clone())],
7255 );
7256 fs.set_index_for_repo(
7257 Path::new("/dir/.git"),
7258 &[("src/main.rs".into(), committed_contents.clone())],
7259 );
7260
7261 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7262
7263 let buffer = project
7264 .update(cx, |project, cx| {
7265 project.open_local_buffer("/dir/src/main.rs", cx)
7266 })
7267 .await
7268 .unwrap();
7269 let uncommitted_diff = project
7270 .update(cx, |project, cx| {
7271 project.open_uncommitted_diff(buffer.clone(), cx)
7272 })
7273 .await
7274 .unwrap();
7275
7276 cx.run_until_parked();
7277 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7278 let snapshot = buffer.read(cx).snapshot();
7279 assert_hunks(
7280 uncommitted_diff.hunks(&snapshot, cx),
7281 &snapshot,
7282 &uncommitted_diff.base_text_string().unwrap(),
7283 &[(
7284 1..2,
7285 " println!(\"hello from HEAD\");\n",
7286 " println!(\"hello from the working copy\");\n",
7287 DiffHunkStatus {
7288 kind: DiffHunkStatusKind::Modified,
7289 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7290 },
7291 )],
7292 );
7293 });
7294}
7295
7296#[gpui::test]
7297async fn test_repository_and_path_for_project_path(
7298 background_executor: BackgroundExecutor,
7299 cx: &mut gpui::TestAppContext,
7300) {
7301 init_test(cx);
7302 let fs = FakeFs::new(background_executor);
7303 fs.insert_tree(
7304 path!("/root"),
7305 json!({
7306 "c.txt": "",
7307 "dir1": {
7308 ".git": {},
7309 "deps": {
7310 "dep1": {
7311 ".git": {},
7312 "src": {
7313 "a.txt": ""
7314 }
7315 }
7316 },
7317 "src": {
7318 "b.txt": ""
7319 }
7320 },
7321 }),
7322 )
7323 .await;
7324
7325 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7326 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7327 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7328 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7329 .await;
7330 cx.run_until_parked();
7331
7332 project.read_with(cx, |project, cx| {
7333 let git_store = project.git_store().read(cx);
7334 let pairs = [
7335 ("c.txt", None),
7336 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
7337 (
7338 "dir1/deps/dep1/src/a.txt",
7339 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
7340 ),
7341 ];
7342 let expected = pairs
7343 .iter()
7344 .map(|(path, result)| {
7345 (
7346 path,
7347 result.map(|(repo, repo_path)| {
7348 (Path::new(repo).into(), RepoPath::from(repo_path))
7349 }),
7350 )
7351 })
7352 .collect::<Vec<_>>();
7353 let actual = pairs
7354 .iter()
7355 .map(|(path, _)| {
7356 let project_path = (tree_id, Path::new(path)).into();
7357 let result = maybe!({
7358 let (repo, repo_path) =
7359 git_store.repository_and_path_for_project_path(&project_path, cx)?;
7360 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
7361 });
7362 (path, result)
7363 })
7364 .collect::<Vec<_>>();
7365 pretty_assertions::assert_eq!(expected, actual);
7366 });
7367
7368 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
7369 .await
7370 .unwrap();
7371 cx.run_until_parked();
7372
7373 project.read_with(cx, |project, cx| {
7374 let git_store = project.git_store().read(cx);
7375 assert_eq!(
7376 git_store.repository_and_path_for_project_path(
7377 &(tree_id, Path::new("dir1/src/b.txt")).into(),
7378 cx
7379 ),
7380 None
7381 );
7382 });
7383}
7384
7385#[gpui::test]
7386async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7387 init_test(cx);
7388 let fs = FakeFs::new(cx.background_executor.clone());
7389 fs.insert_tree(
7390 path!("/root"),
7391 json!({
7392 "home": {
7393 ".git": {},
7394 "project": {
7395 "a.txt": "A"
7396 },
7397 },
7398 }),
7399 )
7400 .await;
7401 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7402
7403 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7404 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7405 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7406 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7407 .await;
7408 tree.flush_fs_events(cx).await;
7409
7410 project.read_with(cx, |project, cx| {
7411 let containing = project
7412 .git_store()
7413 .read(cx)
7414 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7415 assert!(containing.is_none());
7416 });
7417
7418 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7419 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7420 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7421 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7422 .await;
7423 tree.flush_fs_events(cx).await;
7424
7425 project.read_with(cx, |project, cx| {
7426 let containing = project
7427 .git_store()
7428 .read(cx)
7429 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7430 assert_eq!(
7431 containing
7432 .unwrap()
7433 .0
7434 .read(cx)
7435 .work_directory_abs_path
7436 .as_ref(),
7437 Path::new(path!("/root/home"))
7438 );
7439 });
7440}
7441
// End-to-end test against a real git repository: verifies that file statuses
// (modified, untracked, deleted) are observed on startup and kept up to date
// as the working copy and the repository change.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a file that was previously unchanged.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // c.txt now also appears as modified.
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Stage and commit the outstanding changes (removing d.txt from the
    // index), which should leave everything clean except the untracked b.txt.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked and one untracked file from the working copy.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7567
// Verifies postprocessing of raw git statuses: a file present in HEAD and the
// working copy but deleted from the index is reported with a combined
// "deleted in index, added in worktree" (`DA`) status, and nested git
// repositories are excluded from the parent repository's status list.
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // Pick the outer repository (not the nested `sub` repository).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
7629
// Verifies git status tracking when the worktree root is a subfolder deep
// inside a repository: the repository above the worktree root is still
// discovered, and statuses are keyed by repository-relative paths.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repository-relative paths of the two files inside the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Initially only e.txt carries a status; c.txt is clean.
    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT.as_ref(), FileStatus::Untracked)],
    );

    // Open a worktree rooted two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            FileStatus::Untracked
        );
    });

    // Clearing the repository's statuses should clear both entries.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(repository.status_for_path(&E_TXT.into()), None);
    });
}
7706
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// `#[cfg(any())]` always evaluates to false, so this test is currently
// compiled out; remove the attribute once the flakiness is resolved.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    // Verifies that merge conflicts arising from a cherry-pick are surfaced
    // by the repository, and that they clear once the cherry-pick concludes.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Commit a conflicting change to a.txt on another branch, then
    // cherry-pick it onto main so that a.txt ends up conflicted.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // The cherry-pick must have left the repository mid-operation with a
    // recorded conflict.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();
    // The project should observe the conflict.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // With the cherry-pick concluded, the conflict should be gone.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
7787
// Verifies that editing `.gitignore` updates both the ignored state of
// worktree entries and their git statuses.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index track `.gitignore` and `a.xml`; `b.txt` is ignored.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Now `a.xml` is ignored and `b.txt` is staged as added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
7854
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    // Verifies that when a repository's work directory is renamed on disk,
    // the repository's recorded work-directory path follows the rename while
    // the file statuses are preserved.
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: repository rooted at project1 with the expected statuses.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository's work directory follows the rename; statuses persist.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
7934
7935// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
7936// you can't rename a directory which some program has already open. This is a
7937// limitation of the Windows. See:
7938// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
7939#[gpui::test]
7940#[cfg_attr(target_os = "windows", ignore)]
7941async fn test_file_status(cx: &mut gpui::TestAppContext) {
7942 init_test(cx);
7943 cx.executor().allow_parking();
7944 const IGNORE_RULE: &str = "**/target";
7945
7946 let root = TempTree::new(json!({
7947 "project": {
7948 "a.txt": "a",
7949 "b.txt": "bb",
7950 "c": {
7951 "d": {
7952 "e.txt": "eee"
7953 }
7954 },
7955 "f.txt": "ffff",
7956 "target": {
7957 "build_file": "???"
7958 },
7959 ".gitignore": IGNORE_RULE
7960 },
7961
7962 }));
7963 let root_path = root.path();
7964
7965 const A_TXT: &str = "a.txt";
7966 const B_TXT: &str = "b.txt";
7967 const E_TXT: &str = "c/d/e.txt";
7968 const F_TXT: &str = "f.txt";
7969 const DOTGITIGNORE: &str = ".gitignore";
7970 const BUILD_FILE: &str = "target/build_file";
7971
7972 // Set up git repository before creating the worktree.
7973 let work_dir = root.path().join("project");
7974 let mut repo = git_init(work_dir.as_path());
7975 repo.add_ignore_rule(IGNORE_RULE).unwrap();
7976 git_add(A_TXT, &repo);
7977 git_add(E_TXT, &repo);
7978 git_add(DOTGITIGNORE, &repo);
7979 git_commit("Initial commit", &repo);
7980
7981 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
7982
7983 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7984 tree.flush_fs_events(cx).await;
7985 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7986 .await;
7987 cx.executor().run_until_parked();
7988
7989 let repository = project.read_with(cx, |project, cx| {
7990 project.repositories(cx).values().next().unwrap().clone()
7991 });
7992
7993 // Check that the right git state is observed on startup
7994 repository.read_with(cx, |repository, _cx| {
7995 assert_eq!(
7996 repository.work_directory_abs_path.as_ref(),
7997 root_path.join("project").as_path()
7998 );
7999
8000 assert_eq!(
8001 repository.status_for_path(&B_TXT.into()).unwrap().status,
8002 FileStatus::Untracked,
8003 );
8004 assert_eq!(
8005 repository.status_for_path(&F_TXT.into()).unwrap().status,
8006 FileStatus::Untracked,
8007 );
8008 });
8009
8010 // Modify a file in the working copy.
8011 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
8012 tree.flush_fs_events(cx).await;
8013 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
8014 .await;
8015 cx.executor().run_until_parked();
8016
8017 // The worktree detects that the file's git status has changed.
8018 repository.read_with(cx, |repository, _| {
8019 assert_eq!(
8020 repository.status_for_path(&A_TXT.into()).unwrap().status,
8021 StatusCode::Modified.worktree(),
8022 );
8023 });
8024
8025 // Create a commit in the git repository.
8026 git_add(A_TXT, &repo);
8027 git_add(B_TXT, &repo);
8028 git_commit("Committing modified and added", &repo);
8029 tree.flush_fs_events(cx).await;
8030 cx.executor().run_until_parked();
8031
8032 // The worktree detects that the files' git status have changed.
8033 repository.read_with(cx, |repository, _cx| {
8034 assert_eq!(
8035 repository.status_for_path(&F_TXT.into()).unwrap().status,
8036 FileStatus::Untracked,
8037 );
8038 assert_eq!(repository.status_for_path(&B_TXT.into()), None);
8039 assert_eq!(repository.status_for_path(&A_TXT.into()), None);
8040 });
8041
8042 // Modify files in the working copy and perform git operations on other files.
8043 git_reset(0, &repo);
8044 git_remove_index(Path::new(B_TXT), &repo);
8045 git_stash(&mut repo);
8046 std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
8047 std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
8048 tree.flush_fs_events(cx).await;
8049 cx.executor().run_until_parked();
8050
8051 // Check that more complex repo changes are tracked
8052 repository.read_with(cx, |repository, _cx| {
8053 assert_eq!(repository.status_for_path(&A_TXT.into()), None);
8054 assert_eq!(
8055 repository.status_for_path(&B_TXT.into()).unwrap().status,
8056 FileStatus::Untracked,
8057 );
8058 assert_eq!(
8059 repository.status_for_path(&E_TXT.into()).unwrap().status,
8060 StatusCode::Modified.worktree(),
8061 );
8062 });
8063
8064 std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
8065 std::fs::remove_dir_all(work_dir.join("c")).unwrap();
8066 std::fs::write(
8067 work_dir.join(DOTGITIGNORE),
8068 [IGNORE_RULE, "f.txt"].join("\n"),
8069 )
8070 .unwrap();
8071
8072 git_add(Path::new(DOTGITIGNORE), &repo);
8073 git_commit("Committing modified git ignore", &repo);
8074
8075 tree.flush_fs_events(cx).await;
8076 cx.executor().run_until_parked();
8077
8078 let mut renamed_dir_name = "first_directory/second_directory";
8079 const RENAMED_FILE: &str = "rf.txt";
8080
8081 std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
8082 std::fs::write(
8083 work_dir.join(renamed_dir_name).join(RENAMED_FILE),
8084 "new-contents",
8085 )
8086 .unwrap();
8087
8088 tree.flush_fs_events(cx).await;
8089 cx.executor().run_until_parked();
8090
8091 repository.read_with(cx, |repository, _cx| {
8092 assert_eq!(
8093 repository
8094 .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
8095 .unwrap()
8096 .status,
8097 FileStatus::Untracked,
8098 );
8099 });
8100
8101 renamed_dir_name = "new_first_directory/second_directory";
8102
8103 std::fs::rename(
8104 work_dir.join("first_directory"),
8105 work_dir.join("new_first_directory"),
8106 )
8107 .unwrap();
8108
8109 tree.flush_fs_events(cx).await;
8110 cx.executor().run_until_parked();
8111
8112 repository.read_with(cx, |repository, _cx| {
8113 assert_eq!(
8114 repository
8115 .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
8116 .unwrap()
8117 .status,
8118 FileStatus::Untracked,
8119 );
8120 });
8121}
8122
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that adding an *invisible* worktree (e.g. for a single file
    // opened outside the project) does not cause repositories from its
    // ancestor directories to show up in the project's repository list.
    init_test(cx);
    let fs = FakeFs::new(executor);
    // `dir1` and its nested dependency `dep1` are both git repositories, but
    // only `dep1` is opened as the visible worktree.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    visible_worktree
        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;

    // Initially only dep1's repository is known.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Open a single file from the parent repo (`dir1`) as an invisible
    // worktree. Note the `false` visibility flag.
    let (invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    invisible_worktree
        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;

    // The repository list must be unchanged: dir1's repo stays hidden.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8184
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies that ignored/tracked state and git statuses stay correct as
    // files are created after the initial scan, including files ignored by an
    // ancestor .gitignore that lives *outside* the repository.
    init_test(cx);
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                // Don't exclude anything from scanning, so ignored files are
                // still observed by the worktree.
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ignored directories are not scanned eagerly; force a refresh so the
    // entries inside `ignored-dir` exist for the assertions below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: tracked file clean, ancestor-ignored and repo-ignored
    // files have no git status.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files in each of the three categories: a newly tracked file
    // (added to the index below), an ancestor-ignored file, and a repo-ignored
    // file.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    // The staged file shows as Added; the ignored files still have no status.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8319
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Verifies that linked git worktrees (`.git` *file* pointing at
    // `.git/worktrees/...`) and submodules (`.git` file pointing at
    // `.git/modules/...`) are each detected as distinct repositories, and that
    // git events inside them refresh their status.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| {
        project
            .worktrees(cx)
            .next()
            .unwrap()
            .read(cx)
            .as_local()
            .unwrap()
            .scan_complete()
    });
    scan_complete.await;

    // All three repositories (main, linked worktree, submodule) are found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            // HEAD and index say "b", the file on disk says "B" => modified in
            // the working copy.
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repo, not the outer one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        // Barrier guarantees any in-flight status scans have completed before
        // we assert below.
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
8478
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    // Two project worktrees that live inside the same git repository must
    // yield exactly one repository entry, not one per worktree.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repo root as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // Only the shared parent repository is reported, exactly once.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
8524
8525async fn search(
8526 project: &Entity<Project>,
8527 query: SearchQuery,
8528 cx: &mut gpui::TestAppContext,
8529) -> Result<HashMap<String, Vec<Range<usize>>>> {
8530 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8531 let mut results = HashMap::default();
8532 while let Ok(search_result) = search_rx.recv().await {
8533 match search_result {
8534 SearchResult::Buffer { buffer, ranges } => {
8535 results.entry(buffer).or_insert(ranges);
8536 }
8537 SearchResult::LimitReached => {}
8538 }
8539 }
8540 Ok(results
8541 .into_iter()
8542 .map(|(buffer, ranges)| {
8543 buffer.update(cx, |buffer, cx| {
8544 let path = buffer
8545 .file()
8546 .unwrap()
8547 .full_path(cx)
8548 .to_string_lossy()
8549 .to_string();
8550 let ranges = ranges
8551 .into_iter()
8552 .map(|range| range.to_offset(buffer))
8553 .collect::<Vec<_>>();
8554 (path, ranges)
8555 })
8556 })
8557 .collect())
8558}
8559
/// Shared test setup: installs logging, a test settings store, and the
/// globals that `Project` expects before any test logic runs.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        // The settings store must be registered before the init calls below,
        // since they read settings during initialization.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
8571
8572fn json_lang() -> Arc<Language> {
8573 Arc::new(Language::new(
8574 LanguageConfig {
8575 name: "JSON".into(),
8576 matcher: LanguageMatcher {
8577 path_suffixes: vec!["json".to_string()],
8578 ..Default::default()
8579 },
8580 ..Default::default()
8581 },
8582 None,
8583 ))
8584}
8585
8586fn js_lang() -> Arc<Language> {
8587 Arc::new(Language::new(
8588 LanguageConfig {
8589 name: "JavaScript".into(),
8590 matcher: LanguageMatcher {
8591 path_suffixes: vec!["js".to_string()],
8592 ..Default::default()
8593 },
8594 ..Default::default()
8595 },
8596 None,
8597 ))
8598}
8599
8600fn rust_lang() -> Arc<Language> {
8601 Arc::new(Language::new(
8602 LanguageConfig {
8603 name: "Rust".into(),
8604 matcher: LanguageMatcher {
8605 path_suffixes: vec!["rs".to_string()],
8606 ..Default::default()
8607 },
8608 ..Default::default()
8609 },
8610 Some(tree_sitter_rust::LANGUAGE.into()),
8611 ))
8612}
8613
8614fn typescript_lang() -> Arc<Language> {
8615 Arc::new(Language::new(
8616 LanguageConfig {
8617 name: "TypeScript".into(),
8618 matcher: LanguageMatcher {
8619 path_suffixes: vec!["ts".to_string()],
8620 ..Default::default()
8621 },
8622 ..Default::default()
8623 },
8624 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8625 ))
8626}
8627
8628fn tsx_lang() -> Arc<Language> {
8629 Arc::new(Language::new(
8630 LanguageConfig {
8631 name: "tsx".into(),
8632 matcher: LanguageMatcher {
8633 path_suffixes: vec!["tsx".to_string()],
8634 ..Default::default()
8635 },
8636 ..Default::default()
8637 },
8638 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8639 ))
8640}
8641
8642fn get_all_tasks(
8643 project: &Entity<Project>,
8644 task_contexts: &TaskContexts,
8645 cx: &mut App,
8646) -> Vec<(TaskSourceKind, ResolvedTask)> {
8647 let (mut old, new) = project.update(cx, |project, cx| {
8648 project
8649 .task_store
8650 .read(cx)
8651 .task_inventory()
8652 .unwrap()
8653 .read(cx)
8654 .used_and_current_resolved_tasks(task_contexts, cx)
8655 });
8656 old.extend(new);
8657 old
8658}
8659
8660#[track_caller]
8661fn assert_entry_git_state(
8662 tree: &Worktree,
8663 repository: &Repository,
8664 path: &str,
8665 index_status: Option<StatusCode>,
8666 is_ignored: bool,
8667) {
8668 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
8669 let entry = tree
8670 .entry_for_path(path)
8671 .unwrap_or_else(|| panic!("entry {path} not found"));
8672 let status = repository
8673 .status_for_path(&path.into())
8674 .map(|entry| entry.status);
8675 let expected = index_status.map(|index_status| {
8676 TrackedStatus {
8677 index_status,
8678 worktree_status: StatusCode::Unmodified,
8679 }
8680 .into()
8681 });
8682 assert_eq!(
8683 status, expected,
8684 "expected {path} to have git status: {expected:?}"
8685 );
8686 assert_eq!(
8687 entry.is_ignored, is_ignored,
8688 "expected {path} to have is_ignored: {is_ignored}"
8689 );
8690}
8691
8692#[track_caller]
8693fn git_init(path: &Path) -> git2::Repository {
8694 let mut init_opts = RepositoryInitOptions::new();
8695 init_opts.initial_head("main");
8696 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
8697}
8698
8699#[track_caller]
8700fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
8701 let path = path.as_ref();
8702 let mut index = repo.index().expect("Failed to get index");
8703 index.add_path(path).expect("Failed to add file");
8704 index.write().expect("Failed to write index");
8705}
8706
8707#[track_caller]
8708fn git_remove_index(path: &Path, repo: &git2::Repository) {
8709 let mut index = repo.index().expect("Failed to get index");
8710 index.remove_path(path).expect("Failed to add file");
8711 index.write().expect("Failed to write index");
8712}
8713
8714#[track_caller]
8715fn git_commit(msg: &'static str, repo: &git2::Repository) {
8716 use git2::Signature;
8717
8718 let signature = Signature::now("test", "test@zed.dev").unwrap();
8719 let oid = repo.index().unwrap().write_tree().unwrap();
8720 let tree = repo.find_tree(oid).unwrap();
8721 if let Ok(head) = repo.head() {
8722 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
8723
8724 let parent_commit = parent_obj.as_commit().unwrap();
8725
8726 repo.commit(
8727 Some("HEAD"),
8728 &signature,
8729 &signature,
8730 msg,
8731 &tree,
8732 &[parent_commit],
8733 )
8734 .expect("Failed to commit with parent");
8735 } else {
8736 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
8737 .expect("Failed to commit");
8738 }
8739}
8740
// Currently unused helper; `cfg(any())` with no predicates is always false,
// so this is compiled out but kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
8746
/// Stashes all local modifications, restoring a clean working copy.
/// Takes `&mut` because libgit2's stash API mutates repository state.
#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    repo.stash_save(&signature, "N/A", None)
        .expect("Failed to stash");
}
8755
8756#[track_caller]
8757fn git_reset(offset: usize, repo: &git2::Repository) {
8758 let head = repo.head().expect("Couldn't get repo head");
8759 let object = head.peel(git2::ObjectType::Commit).unwrap();
8760 let commit = object.as_commit().unwrap();
8761 let new_head = commit
8762 .parents()
8763 .inspect(|parnet| {
8764 parnet.message();
8765 })
8766 .nth(offset)
8767 .expect("Not enough history");
8768 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
8769 .expect("Could not reset");
8770}
8771
// Currently unused helper (`cfg(any())` is always false); kept for future
// tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    // Create branch `name` pointing at the current HEAD commit.
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-paste: this creates a branch, so the failure message should
    // not claim that committing failed.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
8782
// Currently unused helper (`cfg(any())` is always false); kept for future
// tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    // `name` is a reference name, e.g. "refs/heads/main".
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
8789
// Currently unused helper (`cfg(any())` is always false); kept for future
// tests. Returns libgit2's view of every path's status, keyed by path.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}
8799
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two sibling worktrees, to check that paths resolve to the right one.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    for worktree in project.read_with(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>()) {
        worktree
            .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
            .await;
    }
    cx.run_until_parked();

    // Capture each worktree's absolute root and id for the assertions below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at a worktree root resolves to that worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        // Nested files yield worktree-relative paths.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        // Files in the second worktree resolve to the second worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // Paths inside a worktree resolve even if the file doesn't exist yet
        // (e.g. for files about to be created).
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}