1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
9 DiffHunkStatusKind, assert_hunks,
10};
11use fs::FakeFs;
12use futures::{StreamExt, future};
13use git::{
14 repository::RepoPath,
15 status::{StatusCode, TrackedStatus},
16};
17use git2::RepositoryInitOptions;
18use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
19use http_client::Url;
20use language::{
21 Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
22 LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
23 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
24 tree_sitter_rust, tree_sitter_typescript,
25};
26use lsp::{
27 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
28 WillRenameFiles, notification::DidRenameFiles,
29};
30use parking_lot::Mutex;
31use paths::{config_dir, tasks_file};
32use postage::stream::Stream as _;
33use pretty_assertions::{assert_eq, assert_matches};
34use rand::{Rng as _, rngs::StdRng};
35use serde_json::json;
36#[cfg(not(windows))]
37use std::os;
38use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
39use task::{ResolvedTask, TaskContext};
40use unindent::Unindent as _;
41use util::{
42 TryFutureExt as _, assert_set_eq, maybe, path,
43 paths::PathMatcher,
44 separator,
45 test::{TempTree, marked_text_offsets},
46 uri,
47};
48use worktree::WorktreeModelHandle as _;
49
50#[gpui::test]
51async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
52 cx.executor().allow_parking();
53
54 let (tx, mut rx) = futures::channel::mpsc::unbounded();
55 let _thread = std::thread::spawn(move || {
56 #[cfg(not(target_os = "windows"))]
57 std::fs::metadata("/tmp").unwrap();
58 #[cfg(target_os = "windows")]
59 std::fs::metadata("C:/Windows").unwrap();
60 std::thread::sleep(Duration::from_millis(1000));
61 tx.unbounded_send(1).unwrap();
62 });
63 rx.next().await.unwrap();
64}
65
66#[gpui::test]
67async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
68 cx.executor().allow_parking();
69
70 let io_task = smol::unblock(move || {
71 println!("sleeping on thread {:?}", std::thread::current().id());
72 std::thread::sleep(Duration::from_millis(10));
73 1
74 });
75
76 let task = cx.foreground_executor().spawn(async move {
77 io_task.await;
78 });
79
80 task.await;
81}
82
83#[cfg(not(windows))]
84#[gpui::test]
85async fn test_symlinks(cx: &mut gpui::TestAppContext) {
86 init_test(cx);
87 cx.executor().allow_parking();
88
89 let dir = TempTree::new(json!({
90 "root": {
91 "apple": "",
92 "banana": {
93 "carrot": {
94 "date": "",
95 "endive": "",
96 }
97 },
98 "fennel": {
99 "grape": "",
100 }
101 }
102 }));
103
104 let root_link_path = dir.path().join("root_link");
105 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
106 os::unix::fs::symlink(
107 dir.path().join("root/fennel"),
108 dir.path().join("root/finnochio"),
109 )
110 .unwrap();
111
112 let project = Project::test(
113 Arc::new(RealFs::new(None, cx.executor())),
114 [root_link_path.as_ref()],
115 cx,
116 )
117 .await;
118
119 project.update(cx, |project, cx| {
120 let tree = project.worktrees(cx).next().unwrap().read(cx);
121 assert_eq!(tree.file_count(), 5);
122 assert_eq!(
123 tree.inode_for_path("fennel/grape"),
124 tree.inode_for_path("finnochio/grape")
125 );
126 });
127}
128
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture: a root .editorconfig (root = true) with rules for *.rs and
    // *.js, Zed settings in .zed/settings.json, and a nested b/.editorconfig
    // that overrides indent_size for Rust files in that subtree.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
            indent_style = tab
            indent_size = 3
            end_of_line = lf
            insert_final_newline = true
            trim_trailing_whitespace = true
        [*.js]
            tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n    A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
                indent_size = 2
            "#,
            "b.rs": "fn b() {\n    B\n}",
        },
        "c.js": "def c\n  C\nend",
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let settings/editorconfig file scans settle before querying.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative
        // path, blocking on async language detection.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so it falls back to the .zed/settings.json tab_size of 8.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
218
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Fixture: root-level .zed/ (settings + tasks) and a nested b/.zed/ that
    // overrides both; "a" has no local overrides of its own.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n    B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the settings/tasks files be discovered and parsed.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task resolution requires an active worktree context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // Source kind of the root-level .zed/tasks.json entry.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // While we're here, verify the nested settings override the
            // root ones: a.rs sees tab_size 8, b.rs sees tab_size 2.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both local tasks.json files contribute; nothing has been scheduled yet.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    // id_base embeds the platform-native path separator.
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root-level task as scheduled, and also install a global
    // (user-level) tasks.json with an extra task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The scheduled task now sorts first, the other local task follows, and
    // the newly-added global task (with its env) appears last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
423
424#[gpui::test]
425async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
426 init_test(cx);
427 TaskStore::init(None);
428
429 let fs = FakeFs::new(cx.executor());
430 fs.insert_tree(
431 path!("/dir"),
432 json!({
433 ".zed": {
434 "tasks.json": r#"[{
435 "label": "test worktree root",
436 "command": "echo $ZED_WORKTREE_ROOT"
437 }]"#,
438 },
439 "a": {
440 "a.rs": "fn a() {\n A\n}"
441 },
442 }),
443 )
444 .await;
445
446 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
447 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
448
449 cx.executor().run_until_parked();
450 let worktree_id = cx.update(|cx| {
451 project.update(cx, |project, cx| {
452 project.worktrees(cx).next().unwrap().read(cx).id()
453 })
454 });
455
456 let active_non_worktree_item_tasks = cx.update(|cx| {
457 get_all_tasks(
458 &project,
459 &TaskContexts {
460 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
461 active_worktree_context: None,
462 other_worktree_contexts: Vec::new(),
463 lsp_task_sources: HashMap::default(),
464 latest_selection: None,
465 },
466 cx,
467 )
468 });
469 assert!(
470 active_non_worktree_item_tasks.is_empty(),
471 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
472 );
473
474 let active_worktree_tasks = cx.update(|cx| {
475 get_all_tasks(
476 &project,
477 &TaskContexts {
478 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
479 active_worktree_context: Some((worktree_id, {
480 let mut worktree_context = TaskContext::default();
481 worktree_context
482 .task_variables
483 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
484 worktree_context
485 })),
486 other_worktree_contexts: Vec::new(),
487 lsp_task_sources: HashMap::default(),
488 latest_selection: None,
489 },
490 cx,
491 )
492 });
493 assert_eq!(
494 active_worktree_tasks
495 .into_iter()
496 .map(|(source_kind, task)| {
497 let resolved = task.resolved;
498 (source_kind, resolved.command)
499 })
500 .collect::<Vec<_>>(),
501 vec![(
502 TaskSourceKind::Worktree {
503 id: worktree_id,
504 directory_in_worktree: PathBuf::from(separator!(".zed")),
505 id_base: if cfg!(windows) {
506 "local worktree tasks from directory \".zed\"".into()
507 } else {
508 "local worktree tasks from directory \".zed\"".into()
509 },
510 },
511 "echo /dir".to_string(),
512 )]
513 );
514}
515
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture with two Rust files, a TOML file with no language server,
    // and a JSON file served by a second fake server.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server advertising "." and "::" completion triggers and
    // save notifications.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server advertising a ":" completion trigger.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so no triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // The old document is closed and the renamed one reopened on the same server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer's
    // language (and thus its server) changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive shutdown requests before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is not guaranteed, hence the set
    // comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
917
918#[gpui::test]
919async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
920 init_test(cx);
921
922 let fs = FakeFs::new(cx.executor());
923 fs.insert_tree(
924 path!("/the-root"),
925 json!({
926 ".gitignore": "target\n",
927 "Cargo.lock": "",
928 "src": {
929 "a.rs": "",
930 "b.rs": "",
931 },
932 "target": {
933 "x": {
934 "out": {
935 "x.rs": ""
936 }
937 },
938 "y": {
939 "out": {
940 "y.rs": "",
941 }
942 },
943 "z": {
944 "out": {
945 "z.rs": ""
946 }
947 }
948 }
949 }),
950 )
951 .await;
952 fs.insert_tree(
953 path!("/the-registry"),
954 json!({
955 "dep1": {
956 "src": {
957 "dep1.rs": "",
958 }
959 },
960 "dep2": {
961 "src": {
962 "dep2.rs": "",
963 }
964 },
965 }),
966 )
967 .await;
968 fs.insert_tree(
969 path!("/the/stdlib"),
970 json!({
971 "LICENSE": "",
972 "src": {
973 "string.rs": "",
974 }
975 }),
976 )
977 .await;
978
979 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
980 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
981 (project.languages().clone(), project.lsp_store())
982 });
983 language_registry.add(rust_lang());
984 let mut fake_servers = language_registry.register_fake_lsp(
985 "Rust",
986 FakeLspAdapter {
987 name: "the-language-server",
988 ..Default::default()
989 },
990 );
991
992 cx.executor().run_until_parked();
993
994 // Start the language server by opening a buffer with a compatible file extension.
995 project
996 .update(cx, |project, cx| {
997 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
998 })
999 .await
1000 .unwrap();
1001
1002 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1003 project.update(cx, |project, cx| {
1004 let worktree = project.worktrees(cx).next().unwrap();
1005 assert_eq!(
1006 worktree
1007 .read(cx)
1008 .snapshot()
1009 .entries(true, 0)
1010 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1011 .collect::<Vec<_>>(),
1012 &[
1013 (Path::new(""), false),
1014 (Path::new(".gitignore"), false),
1015 (Path::new("Cargo.lock"), false),
1016 (Path::new("src"), false),
1017 (Path::new("src/a.rs"), false),
1018 (Path::new("src/b.rs"), false),
1019 (Path::new("target"), true),
1020 ]
1021 );
1022 });
1023
1024 let prev_read_dir_count = fs.read_dir_call_count();
1025
1026 let fake_server = fake_servers.next().await.unwrap();
1027 let (server_id, server_name) = lsp_store.read_with(cx, |lsp_store, _| {
1028 let (id, status) = lsp_store.language_server_statuses().next().unwrap();
1029 (id, LanguageServerName::from(status.name.as_str()))
1030 });
1031
1032 // Simulate jumping to a definition in a dependency outside of the worktree.
1033 let _out_of_worktree_buffer = project
1034 .update(cx, |project, cx| {
1035 project.open_local_buffer_via_lsp(
1036 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1037 server_id,
1038 server_name.clone(),
1039 cx,
1040 )
1041 })
1042 .await
1043 .unwrap();
1044
1045 // Keep track of the FS events reported to the language server.
1046 let file_changes = Arc::new(Mutex::new(Vec::new()));
1047 fake_server
1048 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1049 registrations: vec![lsp::Registration {
1050 id: Default::default(),
1051 method: "workspace/didChangeWatchedFiles".to_string(),
1052 register_options: serde_json::to_value(
1053 lsp::DidChangeWatchedFilesRegistrationOptions {
1054 watchers: vec![
1055 lsp::FileSystemWatcher {
1056 glob_pattern: lsp::GlobPattern::String(
1057 path!("/the-root/Cargo.toml").to_string(),
1058 ),
1059 kind: None,
1060 },
1061 lsp::FileSystemWatcher {
1062 glob_pattern: lsp::GlobPattern::String(
1063 path!("/the-root/src/*.{rs,c}").to_string(),
1064 ),
1065 kind: None,
1066 },
1067 lsp::FileSystemWatcher {
1068 glob_pattern: lsp::GlobPattern::String(
1069 path!("/the-root/target/y/**/*.rs").to_string(),
1070 ),
1071 kind: None,
1072 },
1073 lsp::FileSystemWatcher {
1074 glob_pattern: lsp::GlobPattern::String(
1075 path!("/the/stdlib/src/**/*.rs").to_string(),
1076 ),
1077 kind: None,
1078 },
1079 lsp::FileSystemWatcher {
1080 glob_pattern: lsp::GlobPattern::String(
1081 path!("**/Cargo.lock").to_string(),
1082 ),
1083 kind: None,
1084 },
1085 ],
1086 },
1087 )
1088 .ok(),
1089 }],
1090 })
1091 .await
1092 .into_response()
1093 .unwrap();
1094 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1095 let file_changes = file_changes.clone();
1096 move |params, _| {
1097 let mut file_changes = file_changes.lock();
1098 file_changes.extend(params.changes);
1099 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1100 }
1101 });
1102
1103 cx.executor().run_until_parked();
1104 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1105 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1106
1107 let mut new_watched_paths = fs.watched_paths();
1108 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1109 assert_eq!(
1110 &new_watched_paths,
1111 &[
1112 Path::new(path!("/the-root")),
1113 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1114 Path::new(path!("/the/stdlib/src"))
1115 ]
1116 );
1117
1118 // Now the language server has asked us to watch an ignored directory path,
1119 // so we recursively load it.
1120 project.update(cx, |project, cx| {
1121 let worktree = project.visible_worktrees(cx).next().unwrap();
1122 assert_eq!(
1123 worktree
1124 .read(cx)
1125 .snapshot()
1126 .entries(true, 0)
1127 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1128 .collect::<Vec<_>>(),
1129 &[
1130 (Path::new(""), false),
1131 (Path::new(".gitignore"), false),
1132 (Path::new("Cargo.lock"), false),
1133 (Path::new("src"), false),
1134 (Path::new("src/a.rs"), false),
1135 (Path::new("src/b.rs"), false),
1136 (Path::new("target"), true),
1137 (Path::new("target/x"), true),
1138 (Path::new("target/y"), true),
1139 (Path::new("target/y/out"), true),
1140 (Path::new("target/y/out/y.rs"), true),
1141 (Path::new("target/z"), true),
1142 ]
1143 );
1144 });
1145
1146 // Perform some file system mutations, two of which match the watched patterns,
1147 // and one of which does not.
1148 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1149 .await
1150 .unwrap();
1151 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1152 .await
1153 .unwrap();
1154 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1155 .await
1156 .unwrap();
1157 fs.create_file(
1158 path!("/the-root/target/x/out/x2.rs").as_ref(),
1159 Default::default(),
1160 )
1161 .await
1162 .unwrap();
1163 fs.create_file(
1164 path!("/the-root/target/y/out/y2.rs").as_ref(),
1165 Default::default(),
1166 )
1167 .await
1168 .unwrap();
1169 fs.save(
1170 path!("/the-root/Cargo.lock").as_ref(),
1171 &"".into(),
1172 Default::default(),
1173 )
1174 .await
1175 .unwrap();
1176 fs.save(
1177 path!("/the-stdlib/LICENSE").as_ref(),
1178 &"".into(),
1179 Default::default(),
1180 )
1181 .await
1182 .unwrap();
1183 fs.save(
1184 path!("/the/stdlib/src/string.rs").as_ref(),
1185 &"".into(),
1186 Default::default(),
1187 )
1188 .await
1189 .unwrap();
1190
1191 // The language server receives events for the FS mutations that match its watch patterns.
1192 cx.executor().run_until_parked();
1193 assert_eq!(
1194 &*file_changes.lock(),
1195 &[
1196 lsp::FileEvent {
1197 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1198 typ: lsp::FileChangeType::CHANGED,
1199 },
1200 lsp::FileEvent {
1201 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1202 typ: lsp::FileChangeType::DELETED,
1203 },
1204 lsp::FileEvent {
1205 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1206 typ: lsp::FileChangeType::CREATED,
1207 },
1208 lsp::FileEvent {
1209 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1210 typ: lsp::FileChangeType::CREATED,
1211 },
1212 lsp::FileEvent {
1213 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1214 typ: lsp::FileChangeType::CHANGED,
1215 },
1216 ]
1217 );
1218}
1219
1220#[gpui::test]
1221async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1222 init_test(cx);
1223
1224 let fs = FakeFs::new(cx.executor());
1225 fs.insert_tree(
1226 path!("/dir"),
1227 json!({
1228 "a.rs": "let a = 1;",
1229 "b.rs": "let b = 2;"
1230 }),
1231 )
1232 .await;
1233
1234 let project = Project::test(
1235 fs,
1236 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1237 cx,
1238 )
1239 .await;
1240 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1241
1242 let buffer_a = project
1243 .update(cx, |project, cx| {
1244 project.open_local_buffer(path!("/dir/a.rs"), cx)
1245 })
1246 .await
1247 .unwrap();
1248 let buffer_b = project
1249 .update(cx, |project, cx| {
1250 project.open_local_buffer(path!("/dir/b.rs"), cx)
1251 })
1252 .await
1253 .unwrap();
1254
1255 lsp_store.update(cx, |lsp_store, cx| {
1256 lsp_store
1257 .update_diagnostics(
1258 LanguageServerId(0),
1259 lsp::PublishDiagnosticsParams {
1260 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1261 version: None,
1262 diagnostics: vec![lsp::Diagnostic {
1263 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1264 severity: Some(lsp::DiagnosticSeverity::ERROR),
1265 message: "error 1".to_string(),
1266 ..Default::default()
1267 }],
1268 },
1269 &[],
1270 cx,
1271 )
1272 .unwrap();
1273 lsp_store
1274 .update_diagnostics(
1275 LanguageServerId(0),
1276 lsp::PublishDiagnosticsParams {
1277 uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
1278 version: None,
1279 diagnostics: vec![lsp::Diagnostic {
1280 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1281 severity: Some(DiagnosticSeverity::WARNING),
1282 message: "error 2".to_string(),
1283 ..Default::default()
1284 }],
1285 },
1286 &[],
1287 cx,
1288 )
1289 .unwrap();
1290 });
1291
1292 buffer_a.update(cx, |buffer, _| {
1293 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1294 assert_eq!(
1295 chunks
1296 .iter()
1297 .map(|(s, d)| (s.as_str(), *d))
1298 .collect::<Vec<_>>(),
1299 &[
1300 ("let ", None),
1301 ("a", Some(DiagnosticSeverity::ERROR)),
1302 (" = 1;", None),
1303 ]
1304 );
1305 });
1306 buffer_b.update(cx, |buffer, _| {
1307 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1308 assert_eq!(
1309 chunks
1310 .iter()
1311 .map(|(s, d)| (s.as_str(), *d))
1312 .collect::<Vec<_>>(),
1313 &[
1314 ("let ", None),
1315 ("b", Some(DiagnosticSeverity::WARNING)),
1316 (" = 2;", None),
1317 ]
1318 );
1319 });
1320}
1321
1322#[gpui::test]
1323async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1324 init_test(cx);
1325
1326 let fs = FakeFs::new(cx.executor());
1327 fs.insert_tree(
1328 path!("/root"),
1329 json!({
1330 "dir": {
1331 ".git": {
1332 "HEAD": "ref: refs/heads/main",
1333 },
1334 ".gitignore": "b.rs",
1335 "a.rs": "let a = 1;",
1336 "b.rs": "let b = 2;",
1337 },
1338 "other.rs": "let b = c;"
1339 }),
1340 )
1341 .await;
1342
1343 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1344 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1345 let (worktree, _) = project
1346 .update(cx, |project, cx| {
1347 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1348 })
1349 .await
1350 .unwrap();
1351 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1352
1353 let (worktree, _) = project
1354 .update(cx, |project, cx| {
1355 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1356 })
1357 .await
1358 .unwrap();
1359 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1360
1361 let server_id = LanguageServerId(0);
1362 lsp_store.update(cx, |lsp_store, cx| {
1363 lsp_store
1364 .update_diagnostics(
1365 server_id,
1366 lsp::PublishDiagnosticsParams {
1367 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1368 version: None,
1369 diagnostics: vec![lsp::Diagnostic {
1370 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1371 severity: Some(lsp::DiagnosticSeverity::ERROR),
1372 message: "unused variable 'b'".to_string(),
1373 ..Default::default()
1374 }],
1375 },
1376 &[],
1377 cx,
1378 )
1379 .unwrap();
1380 lsp_store
1381 .update_diagnostics(
1382 server_id,
1383 lsp::PublishDiagnosticsParams {
1384 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1385 version: None,
1386 diagnostics: vec![lsp::Diagnostic {
1387 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1388 severity: Some(lsp::DiagnosticSeverity::ERROR),
1389 message: "unknown variable 'c'".to_string(),
1390 ..Default::default()
1391 }],
1392 },
1393 &[],
1394 cx,
1395 )
1396 .unwrap();
1397 });
1398
1399 let main_ignored_buffer = project
1400 .update(cx, |project, cx| {
1401 project.open_buffer((main_worktree_id, "b.rs"), cx)
1402 })
1403 .await
1404 .unwrap();
1405 main_ignored_buffer.update(cx, |buffer, _| {
1406 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1407 assert_eq!(
1408 chunks
1409 .iter()
1410 .map(|(s, d)| (s.as_str(), *d))
1411 .collect::<Vec<_>>(),
1412 &[
1413 ("let ", None),
1414 ("b", Some(DiagnosticSeverity::ERROR)),
1415 (" = 2;", None),
1416 ],
1417 "Gigitnored buffers should still get in-buffer diagnostics",
1418 );
1419 });
1420 let other_buffer = project
1421 .update(cx, |project, cx| {
1422 project.open_buffer((other_worktree_id, ""), cx)
1423 })
1424 .await
1425 .unwrap();
1426 other_buffer.update(cx, |buffer, _| {
1427 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1428 assert_eq!(
1429 chunks
1430 .iter()
1431 .map(|(s, d)| (s.as_str(), *d))
1432 .collect::<Vec<_>>(),
1433 &[
1434 ("let b = ", None),
1435 ("c", Some(DiagnosticSeverity::ERROR)),
1436 (";", None),
1437 ],
1438 "Buffers from hidden projects should still get in-buffer diagnostics"
1439 );
1440 });
1441
1442 project.update(cx, |project, cx| {
1443 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1444 assert_eq!(
1445 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1446 vec![(
1447 ProjectPath {
1448 worktree_id: main_worktree_id,
1449 path: Arc::from(Path::new("b.rs")),
1450 },
1451 server_id,
1452 DiagnosticSummary {
1453 error_count: 1,
1454 warning_count: 0,
1455 }
1456 )]
1457 );
1458 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1459 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1460 });
1461}
1462
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP progress notifications carrying the adapter's
    // disk-based-diagnostics token are surfaced as DiskBasedDiagnosticsStarted/
    // Finished project events, with DiagnosticsUpdated events in between, and
    // that publishing empty diagnostics twice yields only one update event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Subscribe to project events so their exact order can be asserted below.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Progress that begins with the disk-based token (suffix allowed) should
    // emit a DiskBasedDiagnosticsStarted event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Diagnostics published while the progress is running produce a
    // DiagnosticsUpdated event for the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the tokenized progress emits DiskBasedDiagnosticsFinished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic should be attached to the buffer snapshot.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Second identical (empty) publish: no further event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1598
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restarting a language server while its disk-based diagnostics progress
    // is still open must not leave the project stuck in a "diagnostics
    // running" state: the new server's progress lifecycle fully replaces the
    // old one's.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Note: the replacement server gets a fresh id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1685
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    // Diagnostics published by a language server must be cleared — both from
    // the buffer and from the project summary — when that server is restarted.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic should be visible in the buffer and in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1766
1767#[gpui::test]
1768async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1769 init_test(cx);
1770
1771 let fs = FakeFs::new(cx.executor());
1772 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1773
1774 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1775 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1776
1777 language_registry.add(rust_lang());
1778 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1779
1780 let (buffer, _handle) = project
1781 .update(cx, |project, cx| {
1782 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1783 })
1784 .await
1785 .unwrap();
1786
1787 // Before restarting the server, report diagnostics with an unknown buffer version.
1788 let fake_server = fake_servers.next().await.unwrap();
1789 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1790 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1791 version: Some(10000),
1792 diagnostics: Vec::new(),
1793 });
1794 cx.executor().run_until_parked();
1795 project.update(cx, |project, cx| {
1796 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1797 });
1798
1799 let mut fake_server = fake_servers.next().await.unwrap();
1800 let notification = fake_server
1801 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1802 .await
1803 .text_document;
1804 assert_eq!(notification.version, 0);
1805}
1806
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Cancelling language-server work for a buffer must send a
    // WorkDoneProgressCancel only for progress that was begun as cancellable;
    // non-cancellable progress tokens are left alone.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    // Two progress tokens are begun: one non-cancellable, one cancellable.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable progress token should receive a cancel request.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1871
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Toggling `enable_language_server` in the per-language settings must
    // start/stop only the matching server, leaving servers for other
    // languages untouched.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server receives an Exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1989
1990#[gpui::test(iterations = 3)]
1991async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1992 init_test(cx);
1993
1994 let text = "
1995 fn a() { A }
1996 fn b() { BB }
1997 fn c() { CCC }
1998 "
1999 .unindent();
2000
2001 let fs = FakeFs::new(cx.executor());
2002 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2003
2004 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2005 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2006
2007 language_registry.add(rust_lang());
2008 let mut fake_servers = language_registry.register_fake_lsp(
2009 "Rust",
2010 FakeLspAdapter {
2011 disk_based_diagnostics_sources: vec!["disk".into()],
2012 ..Default::default()
2013 },
2014 );
2015
2016 let buffer = project
2017 .update(cx, |project, cx| {
2018 project.open_local_buffer(path!("/dir/a.rs"), cx)
2019 })
2020 .await
2021 .unwrap();
2022
2023 let _handle = project.update(cx, |project, cx| {
2024 project.register_buffer_with_language_servers(&buffer, cx)
2025 });
2026
2027 let mut fake_server = fake_servers.next().await.unwrap();
2028 let open_notification = fake_server
2029 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2030 .await;
2031
2032 // Edit the buffer, moving the content down
2033 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2034 let change_notification_1 = fake_server
2035 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2036 .await;
2037 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2038
2039 // Report some diagnostics for the initial version of the buffer
2040 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2041 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2042 version: Some(open_notification.text_document.version),
2043 diagnostics: vec![
2044 lsp::Diagnostic {
2045 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2046 severity: Some(DiagnosticSeverity::ERROR),
2047 message: "undefined variable 'A'".to_string(),
2048 source: Some("disk".to_string()),
2049 ..Default::default()
2050 },
2051 lsp::Diagnostic {
2052 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2053 severity: Some(DiagnosticSeverity::ERROR),
2054 message: "undefined variable 'BB'".to_string(),
2055 source: Some("disk".to_string()),
2056 ..Default::default()
2057 },
2058 lsp::Diagnostic {
2059 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2060 severity: Some(DiagnosticSeverity::ERROR),
2061 source: Some("disk".to_string()),
2062 message: "undefined variable 'CCC'".to_string(),
2063 ..Default::default()
2064 },
2065 ],
2066 });
2067
2068 // The diagnostics have moved down since they were created.
2069 cx.executor().run_until_parked();
2070 buffer.update(cx, |buffer, _| {
2071 assert_eq!(
2072 buffer
2073 .snapshot()
2074 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2075 .collect::<Vec<_>>(),
2076 &[
2077 DiagnosticEntry {
2078 range: Point::new(3, 9)..Point::new(3, 11),
2079 diagnostic: Diagnostic {
2080 source: Some("disk".into()),
2081 severity: DiagnosticSeverity::ERROR,
2082 message: "undefined variable 'BB'".to_string(),
2083 is_disk_based: true,
2084 group_id: 1,
2085 is_primary: true,
2086 ..Default::default()
2087 },
2088 },
2089 DiagnosticEntry {
2090 range: Point::new(4, 9)..Point::new(4, 12),
2091 diagnostic: Diagnostic {
2092 source: Some("disk".into()),
2093 severity: DiagnosticSeverity::ERROR,
2094 message: "undefined variable 'CCC'".to_string(),
2095 is_disk_based: true,
2096 group_id: 2,
2097 is_primary: true,
2098 ..Default::default()
2099 }
2100 }
2101 ]
2102 );
2103 assert_eq!(
2104 chunks_with_diagnostics(buffer, 0..buffer.len()),
2105 [
2106 ("\n\nfn a() { ".to_string(), None),
2107 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2108 (" }\nfn b() { ".to_string(), None),
2109 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2110 (" }\nfn c() { ".to_string(), None),
2111 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2112 (" }\n".to_string(), None),
2113 ]
2114 );
2115 assert_eq!(
2116 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2117 [
2118 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2119 (" }\nfn c() { ".to_string(), None),
2120 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2121 ]
2122 );
2123 });
2124
2125 // Ensure overlapping diagnostics are highlighted correctly.
2126 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2127 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2128 version: Some(open_notification.text_document.version),
2129 diagnostics: vec![
2130 lsp::Diagnostic {
2131 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2132 severity: Some(DiagnosticSeverity::ERROR),
2133 message: "undefined variable 'A'".to_string(),
2134 source: Some("disk".to_string()),
2135 ..Default::default()
2136 },
2137 lsp::Diagnostic {
2138 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2139 severity: Some(DiagnosticSeverity::WARNING),
2140 message: "unreachable statement".to_string(),
2141 source: Some("disk".to_string()),
2142 ..Default::default()
2143 },
2144 ],
2145 });
2146
2147 cx.executor().run_until_parked();
2148 buffer.update(cx, |buffer, _| {
2149 assert_eq!(
2150 buffer
2151 .snapshot()
2152 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2153 .collect::<Vec<_>>(),
2154 &[
2155 DiagnosticEntry {
2156 range: Point::new(2, 9)..Point::new(2, 12),
2157 diagnostic: Diagnostic {
2158 source: Some("disk".into()),
2159 severity: DiagnosticSeverity::WARNING,
2160 message: "unreachable statement".to_string(),
2161 is_disk_based: true,
2162 group_id: 4,
2163 is_primary: true,
2164 ..Default::default()
2165 }
2166 },
2167 DiagnosticEntry {
2168 range: Point::new(2, 9)..Point::new(2, 10),
2169 diagnostic: Diagnostic {
2170 source: Some("disk".into()),
2171 severity: DiagnosticSeverity::ERROR,
2172 message: "undefined variable 'A'".to_string(),
2173 is_disk_based: true,
2174 group_id: 3,
2175 is_primary: true,
2176 ..Default::default()
2177 },
2178 }
2179 ]
2180 );
2181 assert_eq!(
2182 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2183 [
2184 ("fn a() { ".to_string(), None),
2185 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2186 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2187 ("\n".to_string(), None),
2188 ]
2189 );
2190 assert_eq!(
2191 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2192 [
2193 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2194 ("\n".to_string(), None),
2195 ]
2196 );
2197 });
2198
2199 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2200 // changes since the last save.
2201 buffer.update(cx, |buffer, cx| {
2202 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2203 buffer.edit(
2204 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2205 None,
2206 cx,
2207 );
2208 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2209 });
2210 let change_notification_2 = fake_server
2211 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2212 .await;
2213 assert!(
2214 change_notification_2.text_document.version > change_notification_1.text_document.version
2215 );
2216
2217 // Handle out-of-order diagnostics
2218 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2219 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2220 version: Some(change_notification_2.text_document.version),
2221 diagnostics: vec![
2222 lsp::Diagnostic {
2223 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2224 severity: Some(DiagnosticSeverity::ERROR),
2225 message: "undefined variable 'BB'".to_string(),
2226 source: Some("disk".to_string()),
2227 ..Default::default()
2228 },
2229 lsp::Diagnostic {
2230 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2231 severity: Some(DiagnosticSeverity::WARNING),
2232 message: "undefined variable 'A'".to_string(),
2233 source: Some("disk".to_string()),
2234 ..Default::default()
2235 },
2236 ],
2237 });
2238
2239 cx.executor().run_until_parked();
2240 buffer.update(cx, |buffer, _| {
2241 assert_eq!(
2242 buffer
2243 .snapshot()
2244 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2245 .collect::<Vec<_>>(),
2246 &[
2247 DiagnosticEntry {
2248 range: Point::new(2, 21)..Point::new(2, 22),
2249 diagnostic: Diagnostic {
2250 source: Some("disk".into()),
2251 severity: DiagnosticSeverity::WARNING,
2252 message: "undefined variable 'A'".to_string(),
2253 is_disk_based: true,
2254 group_id: 6,
2255 is_primary: true,
2256 ..Default::default()
2257 }
2258 },
2259 DiagnosticEntry {
2260 range: Point::new(3, 9)..Point::new(3, 14),
2261 diagnostic: Diagnostic {
2262 source: Some("disk".into()),
2263 severity: DiagnosticSeverity::ERROR,
2264 message: "undefined variable 'BB'".to_string(),
2265 is_disk_based: true,
2266 group_id: 5,
2267 is_primary: true,
2268 ..Default::default()
2269 },
2270 }
2271 ]
2272 );
2273 });
2274}
2275
2276#[gpui::test]
2277async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2278 init_test(cx);
2279
2280 let text = concat!(
2281 "let one = ;\n", //
2282 "let two = \n",
2283 "let three = 3;\n",
2284 );
2285
2286 let fs = FakeFs::new(cx.executor());
2287 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2288
2289 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2290 let buffer = project
2291 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2292 .await
2293 .unwrap();
2294
2295 project.update(cx, |project, cx| {
2296 project.lsp_store.update(cx, |lsp_store, cx| {
2297 lsp_store
2298 .update_diagnostic_entries(
2299 LanguageServerId(0),
2300 PathBuf::from("/dir/a.rs"),
2301 None,
2302 vec![
2303 DiagnosticEntry {
2304 range: Unclipped(PointUtf16::new(0, 10))
2305 ..Unclipped(PointUtf16::new(0, 10)),
2306 diagnostic: Diagnostic {
2307 severity: DiagnosticSeverity::ERROR,
2308 message: "syntax error 1".to_string(),
2309 ..Default::default()
2310 },
2311 },
2312 DiagnosticEntry {
2313 range: Unclipped(PointUtf16::new(1, 10))
2314 ..Unclipped(PointUtf16::new(1, 10)),
2315 diagnostic: Diagnostic {
2316 severity: DiagnosticSeverity::ERROR,
2317 message: "syntax error 2".to_string(),
2318 ..Default::default()
2319 },
2320 },
2321 ],
2322 cx,
2323 )
2324 .unwrap();
2325 })
2326 });
2327
2328 // An empty range is extended forward to include the following character.
2329 // At the end of a line, an empty range is extended backward to include
2330 // the preceding character.
2331 buffer.update(cx, |buffer, _| {
2332 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2333 assert_eq!(
2334 chunks
2335 .iter()
2336 .map(|(s, d)| (s.as_str(), *d))
2337 .collect::<Vec<_>>(),
2338 &[
2339 ("let one = ", None),
2340 (";", Some(DiagnosticSeverity::ERROR)),
2341 ("\nlet two =", None),
2342 (" ", Some(DiagnosticSeverity::ERROR)),
2343 ("\nlet three = 3;\n", None)
2344 ]
2345 );
2346 });
2347}
2348
2349#[gpui::test]
2350async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2351 init_test(cx);
2352
2353 let fs = FakeFs::new(cx.executor());
2354 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2355 .await;
2356
2357 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2358 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2359
2360 lsp_store.update(cx, |lsp_store, cx| {
2361 lsp_store
2362 .update_diagnostic_entries(
2363 LanguageServerId(0),
2364 Path::new("/dir/a.rs").to_owned(),
2365 None,
2366 vec![DiagnosticEntry {
2367 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2368 diagnostic: Diagnostic {
2369 severity: DiagnosticSeverity::ERROR,
2370 is_primary: true,
2371 message: "syntax error a1".to_string(),
2372 ..Default::default()
2373 },
2374 }],
2375 cx,
2376 )
2377 .unwrap();
2378 lsp_store
2379 .update_diagnostic_entries(
2380 LanguageServerId(1),
2381 Path::new("/dir/a.rs").to_owned(),
2382 None,
2383 vec![DiagnosticEntry {
2384 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2385 diagnostic: Diagnostic {
2386 severity: DiagnosticSeverity::ERROR,
2387 is_primary: true,
2388 message: "syntax error b1".to_string(),
2389 ..Default::default()
2390 },
2391 }],
2392 cx,
2393 )
2394 .unwrap();
2395
2396 assert_eq!(
2397 lsp_store.diagnostic_summary(false, cx),
2398 DiagnosticSummary {
2399 error_count: 2,
2400 warning_count: 0,
2401 }
2402 );
2403 });
2404}
2405
// Verifies that `edits_from_lsp` rebases edits that a language server
// computed against an *older* document version: the buffer is edited after
// the server snapshots it, and the returned edits must still land where the
// user would expect them in the current text.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the LSP edits
    // below are tagged with this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Interpret LSP edits that were computed against the pre-edit text,
    // pinned to the old document version recorded above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits must both apply the server's changes and
    // preserve the user's interleaved edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2560
// Verifies that `edits_from_lsp` collapses a large, redundant diff (the
// whole file re-inserted and the original deleted) into a minimal set of
// edits touching only the lines that actually changed.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges into concrete points so they can
        // be compared against the expected minimal edits.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large diff collapsed into just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        // Applying the minimal edits yields the merged import.
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2671
2672#[gpui::test]
2673async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
2674 cx: &mut gpui::TestAppContext,
2675) {
2676 init_test(cx);
2677
2678 let text = "Path()";
2679
2680 let fs = FakeFs::new(cx.executor());
2681 fs.insert_tree(
2682 path!("/dir"),
2683 json!({
2684 "a.rs": text
2685 }),
2686 )
2687 .await;
2688
2689 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2690 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
2691 let buffer = project
2692 .update(cx, |project, cx| {
2693 project.open_local_buffer(path!("/dir/a.rs"), cx)
2694 })
2695 .await
2696 .unwrap();
2697
2698 // Simulate the language server sending us a pair of edits at the same location,
2699 // with an insertion following a replacement (which violates the LSP spec).
2700 let edits = lsp_store
2701 .update(cx, |lsp_store, cx| {
2702 lsp_store.as_local_mut().unwrap().edits_from_lsp(
2703 &buffer,
2704 [
2705 lsp::TextEdit {
2706 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
2707 new_text: "Path".into(),
2708 },
2709 lsp::TextEdit {
2710 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2711 new_text: "from path import Path\n\n\n".into(),
2712 },
2713 ],
2714 LanguageServerId(0),
2715 None,
2716 cx,
2717 )
2718 })
2719 .await
2720 .unwrap();
2721
2722 buffer.update(cx, |buffer, cx| {
2723 buffer.edit(edits, None, cx);
2724 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
2725 });
2726}
2727
// Verifies that `edits_from_lsp` tolerates malformed server edits — ranges
// out of order, inverted (end before start), or pointing past the end of the
// file — and still produces a sane, minimal edit list.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) comes before start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position far beyond the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges into concrete points.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, only two well-formed edits remain.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        // Applying them yields the merged import.
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2834
2835fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2836 buffer: &Buffer,
2837 range: Range<T>,
2838) -> Vec<(String, Option<DiagnosticSeverity>)> {
2839 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2840 for chunk in buffer.snapshot().chunks(range, true) {
2841 if chunks.last().map_or(false, |prev_chunk| {
2842 prev_chunk.1 == chunk.diagnostic_severity
2843 }) {
2844 chunks.last_mut().unwrap().0.push_str(chunk.text);
2845 } else {
2846 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2847 }
2848 }
2849 chunks
2850}
2851
// Go-to-definition into a file outside the project's worktrees: the target
// buffer must open in a new *invisible* worktree, no second language server
// may be started for it, and the invisible worktree must be released once
// the last reference to the definition location is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` exists on disk but is not in
    // any worktree yet.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The server resolves the definition request to a location inside `a.rs`.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        // The target buffer points at `a.rs`, opened via a new worktree that
        // is present but not visible.
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition released the invisible worktree for `a.rs`.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path together with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2949
// When a completion item carries a `text_edit`, that edit's text and range
// must take precedence over both `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request; it stays pending until the fake server
    // handler below answers it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Answer the in-flight request with an item whose `text_edit` replaces
    // the trailing "fqn"; `.next().await` waits for that one request.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // Both the replacement text and its range come from `text_edit`.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3027
// Completion items that have no `text_edit` of their own must fall back to
// the list-level default `edit_range` (LSP `CompletionListItemDefaults`),
// with the replacement text taken from `insert_text`, or failing that, the
// item's `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Start the request; it stays pending until the handler below runs.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // Text comes from `insert_text`; range from the default edit_range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no `insert_text` either, the `label` is the final fallback.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3153
// Completion items with neither a `text_edit` nor a list-level default
// edit_range: the replace range must be inferred from the word surrounding
// the cursor, with `insert_text` (or failing that, `label`) as the text.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Start the request; it stays pending until the handler below answers.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The range is the word "fqn" before the cursor; text is `insert_text`.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor inside the string literal, just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // Range is the word "cmp" before the cursor; text falls back to `label`.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3249
3250#[gpui::test]
3251async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
3252 init_test(cx);
3253
3254 let fs = FakeFs::new(cx.executor());
3255 fs.insert_tree(
3256 path!("/dir"),
3257 json!({
3258 "a.ts": "",
3259 }),
3260 )
3261 .await;
3262
3263 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3264
3265 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3266 language_registry.add(typescript_lang());
3267 let mut fake_language_servers = language_registry.register_fake_lsp(
3268 "TypeScript",
3269 FakeLspAdapter {
3270 capabilities: lsp::ServerCapabilities {
3271 completion_provider: Some(lsp::CompletionOptions {
3272 trigger_characters: Some(vec![":".to_string()]),
3273 ..Default::default()
3274 }),
3275 ..Default::default()
3276 },
3277 ..Default::default()
3278 },
3279 );
3280
3281 let (buffer, _handle) = project
3282 .update(cx, |p, cx| {
3283 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3284 })
3285 .await
3286 .unwrap();
3287
3288 let fake_server = fake_language_servers.next().await.unwrap();
3289
3290 let text = "let a = b.fqn";
3291 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3292 let completions = project.update(cx, |project, cx| {
3293 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3294 });
3295
3296 fake_server
3297 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3298 Ok(Some(lsp::CompletionResponse::Array(vec![
3299 lsp::CompletionItem {
3300 label: "fullyQualifiedName?".into(),
3301 insert_text: Some("fully\rQualified\r\nName".into()),
3302 ..Default::default()
3303 },
3304 ])))
3305 })
3306 .next()
3307 .await;
3308 let completions = completions.await.unwrap().unwrap();
3309 assert_eq!(completions.len(), 1);
3310 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
3311}
3312
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // Covers the command-based code action flow: the server's code actions
    // carry no edits, so applying one requires resolving the action, executing
    // its command via `workspace/executeCommand`, and capturing any
    // `workspace/applyEdit` requests the server sends back while the command
    // runs. Those edits must end up in the returned project transaction.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Advertise both code-action resolution and an execute-command capability,
    // since this test exercises the resolve -> execute path.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request back to the client, inserting
                    // "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3454
3455#[gpui::test(iterations = 10)]
3456async fn test_save_file(cx: &mut gpui::TestAppContext) {
3457 init_test(cx);
3458
3459 let fs = FakeFs::new(cx.executor());
3460 fs.insert_tree(
3461 path!("/dir"),
3462 json!({
3463 "file1": "the old contents",
3464 }),
3465 )
3466 .await;
3467
3468 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3469 let buffer = project
3470 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3471 .await
3472 .unwrap();
3473 buffer.update(cx, |buffer, cx| {
3474 assert_eq!(buffer.text(), "the old contents");
3475 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3476 });
3477
3478 project
3479 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3480 .await
3481 .unwrap();
3482
3483 let new_text = fs
3484 .load(Path::new(path!("/dir/file1")))
3485 .await
3486 .unwrap()
3487 .replace("\r\n", "\n");
3488 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3489}
3490
3491#[gpui::test(iterations = 30)]
3492async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3493 init_test(cx);
3494
3495 let fs = FakeFs::new(cx.executor().clone());
3496 fs.insert_tree(
3497 path!("/dir"),
3498 json!({
3499 "file1": "the original contents",
3500 }),
3501 )
3502 .await;
3503
3504 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3505 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3506 let buffer = project
3507 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3508 .await
3509 .unwrap();
3510
3511 // Simulate buffer diffs being slow, so that they don't complete before
3512 // the next file change occurs.
3513 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3514
3515 // Change the buffer's file on disk, and then wait for the file change
3516 // to be detected by the worktree, so that the buffer starts reloading.
3517 fs.save(
3518 path!("/dir/file1").as_ref(),
3519 &"the first contents".into(),
3520 Default::default(),
3521 )
3522 .await
3523 .unwrap();
3524 worktree.next_event(cx).await;
3525
3526 // Change the buffer's file again. Depending on the random seed, the
3527 // previous file change may still be in progress.
3528 fs.save(
3529 path!("/dir/file1").as_ref(),
3530 &"the second contents".into(),
3531 Default::default(),
3532 )
3533 .await
3534 .unwrap();
3535 worktree.next_event(cx).await;
3536
3537 cx.executor().run_until_parked();
3538 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3539 buffer.read_with(cx, |buffer, _| {
3540 assert_eq!(buffer.text(), on_disk_text);
3541 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3542 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3543 });
3544}
3545
3546#[gpui::test(iterations = 30)]
3547async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3548 init_test(cx);
3549
3550 let fs = FakeFs::new(cx.executor().clone());
3551 fs.insert_tree(
3552 path!("/dir"),
3553 json!({
3554 "file1": "the original contents",
3555 }),
3556 )
3557 .await;
3558
3559 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3560 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3561 let buffer = project
3562 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3563 .await
3564 .unwrap();
3565
3566 // Simulate buffer diffs being slow, so that they don't complete before
3567 // the next file change occurs.
3568 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3569
3570 // Change the buffer's file on disk, and then wait for the file change
3571 // to be detected by the worktree, so that the buffer starts reloading.
3572 fs.save(
3573 path!("/dir/file1").as_ref(),
3574 &"the first contents".into(),
3575 Default::default(),
3576 )
3577 .await
3578 .unwrap();
3579 worktree.next_event(cx).await;
3580
3581 cx.executor()
3582 .spawn(cx.executor().simulate_random_delay())
3583 .await;
3584
3585 // Perform a noop edit, causing the buffer's version to increase.
3586 buffer.update(cx, |buffer, cx| {
3587 buffer.edit([(0..0, " ")], None, cx);
3588 buffer.undo(cx);
3589 });
3590
3591 cx.executor().run_until_parked();
3592 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3593 buffer.read_with(cx, |buffer, _| {
3594 let buffer_text = buffer.text();
3595 if buffer_text == on_disk_text {
3596 assert!(
3597 !buffer.is_dirty() && !buffer.has_conflict(),
3598 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3599 );
3600 }
3601 // If the file change occurred while the buffer was processing the first
3602 // change, the buffer will be in a conflicting state.
3603 else {
3604 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3605 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3606 }
3607 });
3608}
3609
3610#[gpui::test]
3611async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3612 init_test(cx);
3613
3614 let fs = FakeFs::new(cx.executor());
3615 fs.insert_tree(
3616 path!("/dir"),
3617 json!({
3618 "file1": "the old contents",
3619 }),
3620 )
3621 .await;
3622
3623 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3624 let buffer = project
3625 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3626 .await
3627 .unwrap();
3628 buffer.update(cx, |buffer, cx| {
3629 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3630 });
3631
3632 project
3633 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3634 .await
3635 .unwrap();
3636
3637 let new_text = fs
3638 .load(Path::new(path!("/dir/file1")))
3639 .await
3640 .unwrap()
3641 .replace("\r\n", "\n");
3642 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3643}
3644
3645#[gpui::test]
3646async fn test_save_as(cx: &mut gpui::TestAppContext) {
3647 init_test(cx);
3648
3649 let fs = FakeFs::new(cx.executor());
3650 fs.insert_tree("/dir", json!({})).await;
3651
3652 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3653
3654 let languages = project.update(cx, |project, _| project.languages().clone());
3655 languages.add(rust_lang());
3656
3657 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3658 buffer.update(cx, |buffer, cx| {
3659 buffer.edit([(0..0, "abc")], None, cx);
3660 assert!(buffer.is_dirty());
3661 assert!(!buffer.has_conflict());
3662 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3663 });
3664 project
3665 .update(cx, |project, cx| {
3666 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3667 let path = ProjectPath {
3668 worktree_id,
3669 path: Arc::from(Path::new("file1.rs")),
3670 };
3671 project.save_buffer_as(buffer.clone(), path, cx)
3672 })
3673 .await
3674 .unwrap();
3675 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3676
3677 cx.executor().run_until_parked();
3678 buffer.update(cx, |buffer, cx| {
3679 assert_eq!(
3680 buffer.file().unwrap().full_path(cx),
3681 Path::new("dir/file1.rs")
3682 );
3683 assert!(!buffer.is_dirty());
3684 assert!(!buffer.has_conflict());
3685 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3686 });
3687
3688 let opened_buffer = project
3689 .update(cx, |project, cx| {
3690 project.open_local_buffer("/dir/file1.rs", cx)
3691 })
3692 .await
3693 .unwrap();
3694 assert_eq!(opened_buffer, buffer);
3695}
3696
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    // End-to-end check that filesystem renames/deletions are reflected both in
    // the local worktree (stable entry ids, updated buffer paths and disk
    // states) and, via streamed updates, in a remote replica of the worktree.
    // Uses the real filesystem, hence `allow_parking` and retries.
    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the stable worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can be replayed
    // into the remote worktree later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including files moved via a renamed parent.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files' new paths; the deleted file's buffer
    // keeps its last-known path but reports a deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3862
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    // Renaming a directory must preserve the entry ids of the directory and
    // the files inside it, and must not dirty buffers open on those files.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the stable worktree entry id for a relative path,
    // panicking if the path has no entry.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory "a" to "b" through the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids are stable across the rename, and the open buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3914
3915#[gpui::test]
3916async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3917 init_test(cx);
3918
3919 let fs = FakeFs::new(cx.executor());
3920 fs.insert_tree(
3921 "/dir",
3922 json!({
3923 "a.txt": "a-contents",
3924 "b.txt": "b-contents",
3925 }),
3926 )
3927 .await;
3928
3929 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3930
3931 // Spawn multiple tasks to open paths, repeating some paths.
3932 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3933 (
3934 p.open_local_buffer("/dir/a.txt", cx),
3935 p.open_local_buffer("/dir/b.txt", cx),
3936 p.open_local_buffer("/dir/a.txt", cx),
3937 )
3938 });
3939
3940 let buffer_a_1 = buffer_a_1.await.unwrap();
3941 let buffer_a_2 = buffer_a_2.await.unwrap();
3942 let buffer_b = buffer_b.await.unwrap();
3943 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3944 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3945
3946 // There is only one buffer per path.
3947 let buffer_a_id = buffer_a_1.entity_id();
3948 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3949
3950 // Open the same path again while it is still open.
3951 drop(buffer_a_1);
3952 let buffer_a_3 = project
3953 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3954 .await
3955 .unwrap();
3956
3957 // There's still only one buffer per path.
3958 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3959}
3960
3961#[gpui::test]
3962async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3963 init_test(cx);
3964
3965 let fs = FakeFs::new(cx.executor());
3966 fs.insert_tree(
3967 path!("/dir"),
3968 json!({
3969 "file1": "abc",
3970 "file2": "def",
3971 "file3": "ghi",
3972 }),
3973 )
3974 .await;
3975
3976 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3977
3978 let buffer1 = project
3979 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3980 .await
3981 .unwrap();
3982 let events = Arc::new(Mutex::new(Vec::new()));
3983
3984 // initially, the buffer isn't dirty.
3985 buffer1.update(cx, |buffer, cx| {
3986 cx.subscribe(&buffer1, {
3987 let events = events.clone();
3988 move |_, _, event, _| match event {
3989 BufferEvent::Operation { .. } => {}
3990 _ => events.lock().push(event.clone()),
3991 }
3992 })
3993 .detach();
3994
3995 assert!(!buffer.is_dirty());
3996 assert!(events.lock().is_empty());
3997
3998 buffer.edit([(1..2, "")], None, cx);
3999 });
4000
4001 // after the first edit, the buffer is dirty, and emits a dirtied event.
4002 buffer1.update(cx, |buffer, cx| {
4003 assert!(buffer.text() == "ac");
4004 assert!(buffer.is_dirty());
4005 assert_eq!(
4006 *events.lock(),
4007 &[
4008 language::BufferEvent::Edited,
4009 language::BufferEvent::DirtyChanged
4010 ]
4011 );
4012 events.lock().clear();
4013 buffer.did_save(
4014 buffer.version(),
4015 buffer.file().unwrap().disk_state().mtime(),
4016 cx,
4017 );
4018 });
4019
4020 // after saving, the buffer is not dirty, and emits a saved event.
4021 buffer1.update(cx, |buffer, cx| {
4022 assert!(!buffer.is_dirty());
4023 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4024 events.lock().clear();
4025
4026 buffer.edit([(1..1, "B")], None, cx);
4027 buffer.edit([(2..2, "D")], None, cx);
4028 });
4029
4030 // after editing again, the buffer is dirty, and emits another dirty event.
4031 buffer1.update(cx, |buffer, cx| {
4032 assert!(buffer.text() == "aBDc");
4033 assert!(buffer.is_dirty());
4034 assert_eq!(
4035 *events.lock(),
4036 &[
4037 language::BufferEvent::Edited,
4038 language::BufferEvent::DirtyChanged,
4039 language::BufferEvent::Edited,
4040 ],
4041 );
4042 events.lock().clear();
4043
4044 // After restoring the buffer to its previously-saved state,
4045 // the buffer is not considered dirty anymore.
4046 buffer.edit([(1..3, "")], None, cx);
4047 assert!(buffer.text() == "ac");
4048 assert!(!buffer.is_dirty());
4049 });
4050
4051 assert_eq!(
4052 *events.lock(),
4053 &[
4054 language::BufferEvent::Edited,
4055 language::BufferEvent::DirtyChanged
4056 ]
4057 );
4058
4059 // When a file is deleted, it is not considered dirty.
4060 let events = Arc::new(Mutex::new(Vec::new()));
4061 let buffer2 = project
4062 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4063 .await
4064 .unwrap();
4065 buffer2.update(cx, |_, cx| {
4066 cx.subscribe(&buffer2, {
4067 let events = events.clone();
4068 move |_, _, event, _| match event {
4069 BufferEvent::Operation { .. } => {}
4070 _ => events.lock().push(event.clone()),
4071 }
4072 })
4073 .detach();
4074 });
4075
4076 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4077 .await
4078 .unwrap();
4079 cx.executor().run_until_parked();
4080 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4081 assert_eq!(
4082 mem::take(&mut *events.lock()),
4083 &[language::BufferEvent::FileHandleChanged]
4084 );
4085
4086 // Buffer becomes dirty when edited.
4087 buffer2.update(cx, |buffer, cx| {
4088 buffer.edit([(2..3, "")], None, cx);
4089 assert_eq!(buffer.is_dirty(), true);
4090 });
4091 assert_eq!(
4092 mem::take(&mut *events.lock()),
4093 &[
4094 language::BufferEvent::Edited,
4095 language::BufferEvent::DirtyChanged
4096 ]
4097 );
4098
4099 // Buffer becomes clean again when all of its content is removed, because
4100 // the file was deleted.
4101 buffer2.update(cx, |buffer, cx| {
4102 buffer.edit([(0..2, "")], None, cx);
4103 assert_eq!(buffer.is_empty(), true);
4104 assert_eq!(buffer.is_dirty(), false);
4105 });
4106 assert_eq!(
4107 *events.lock(),
4108 &[
4109 language::BufferEvent::Edited,
4110 language::BufferEvent::DirtyChanged
4111 ]
4112 );
4113
4114 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4115 let events = Arc::new(Mutex::new(Vec::new()));
4116 let buffer3 = project
4117 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4118 .await
4119 .unwrap();
4120 buffer3.update(cx, |_, cx| {
4121 cx.subscribe(&buffer3, {
4122 let events = events.clone();
4123 move |_, _, event, _| match event {
4124 BufferEvent::Operation { .. } => {}
4125 _ => events.lock().push(event.clone()),
4126 }
4127 })
4128 .detach();
4129 });
4130
4131 buffer3.update(cx, |buffer, cx| {
4132 buffer.edit([(0..0, "x")], None, cx);
4133 });
4134 events.lock().clear();
4135 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4136 .await
4137 .unwrap();
4138 cx.executor().run_until_parked();
4139 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4140 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4141}
4142
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // A clean buffer is reloaded in place when its file changes on disk — the
    // new contents are applied as a diff so that anchors keep tracking their
    // logical positions. A dirty buffer, by contrast, keeps its edits and is
    // flagged as conflicted instead of being reloaded.
    init_test(cx);

    // The "ˇ" markers denote offsets at which anchors will be created.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diffed edits rather than staying at
        // their original absolute offsets.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4225
4226#[gpui::test]
4227async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4228 init_test(cx);
4229
4230 let fs = FakeFs::new(cx.executor());
4231 fs.insert_tree(
4232 path!("/dir"),
4233 json!({
4234 "file1": "a\nb\nc\n",
4235 "file2": "one\r\ntwo\r\nthree\r\n",
4236 }),
4237 )
4238 .await;
4239
4240 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4241 let buffer1 = project
4242 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4243 .await
4244 .unwrap();
4245 let buffer2 = project
4246 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4247 .await
4248 .unwrap();
4249
4250 buffer1.update(cx, |buffer, _| {
4251 assert_eq!(buffer.text(), "a\nb\nc\n");
4252 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4253 });
4254 buffer2.update(cx, |buffer, _| {
4255 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4256 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4257 });
4258
4259 // Change a file's line endings on disk from unix to windows. The buffer's
4260 // state updates correctly.
4261 fs.save(
4262 path!("/dir/file1").as_ref(),
4263 &"aaa\nb\nc\n".into(),
4264 LineEnding::Windows,
4265 )
4266 .await
4267 .unwrap();
4268 cx.executor().run_until_parked();
4269 buffer1.update(cx, |buffer, _| {
4270 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4271 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4272 });
4273
4274 // Save a file with windows line endings. The file is written correctly.
4275 buffer2.update(cx, |buffer, cx| {
4276 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4277 });
4278 project
4279 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4280 .await
4281 .unwrap();
4282 assert_eq!(
4283 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4284 "one\r\ntwo\r\nthree\r\nfour\r\n",
4285 );
4286}
4287
4288#[gpui::test]
4289async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4290 init_test(cx);
4291
4292 let fs = FakeFs::new(cx.executor());
4293 fs.insert_tree(
4294 path!("/dir"),
4295 json!({
4296 "a.rs": "
4297 fn foo(mut v: Vec<usize>) {
4298 for x in &v {
4299 v.push(1);
4300 }
4301 }
4302 "
4303 .unindent(),
4304 }),
4305 )
4306 .await;
4307
4308 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4309 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
4310 let buffer = project
4311 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
4312 .await
4313 .unwrap();
4314
4315 let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
4316 let message = lsp::PublishDiagnosticsParams {
4317 uri: buffer_uri.clone(),
4318 diagnostics: vec![
4319 lsp::Diagnostic {
4320 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4321 severity: Some(DiagnosticSeverity::WARNING),
4322 message: "error 1".to_string(),
4323 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4324 location: lsp::Location {
4325 uri: buffer_uri.clone(),
4326 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4327 },
4328 message: "error 1 hint 1".to_string(),
4329 }]),
4330 ..Default::default()
4331 },
4332 lsp::Diagnostic {
4333 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4334 severity: Some(DiagnosticSeverity::HINT),
4335 message: "error 1 hint 1".to_string(),
4336 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4337 location: lsp::Location {
4338 uri: buffer_uri.clone(),
4339 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4340 },
4341 message: "original diagnostic".to_string(),
4342 }]),
4343 ..Default::default()
4344 },
4345 lsp::Diagnostic {
4346 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4347 severity: Some(DiagnosticSeverity::ERROR),
4348 message: "error 2".to_string(),
4349 related_information: Some(vec![
4350 lsp::DiagnosticRelatedInformation {
4351 location: lsp::Location {
4352 uri: buffer_uri.clone(),
4353 range: lsp::Range::new(
4354 lsp::Position::new(1, 13),
4355 lsp::Position::new(1, 15),
4356 ),
4357 },
4358 message: "error 2 hint 1".to_string(),
4359 },
4360 lsp::DiagnosticRelatedInformation {
4361 location: lsp::Location {
4362 uri: buffer_uri.clone(),
4363 range: lsp::Range::new(
4364 lsp::Position::new(1, 13),
4365 lsp::Position::new(1, 15),
4366 ),
4367 },
4368 message: "error 2 hint 2".to_string(),
4369 },
4370 ]),
4371 ..Default::default()
4372 },
4373 lsp::Diagnostic {
4374 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4375 severity: Some(DiagnosticSeverity::HINT),
4376 message: "error 2 hint 1".to_string(),
4377 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4378 location: lsp::Location {
4379 uri: buffer_uri.clone(),
4380 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4381 },
4382 message: "original diagnostic".to_string(),
4383 }]),
4384 ..Default::default()
4385 },
4386 lsp::Diagnostic {
4387 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4388 severity: Some(DiagnosticSeverity::HINT),
4389 message: "error 2 hint 2".to_string(),
4390 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4391 location: lsp::Location {
4392 uri: buffer_uri,
4393 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4394 },
4395 message: "original diagnostic".to_string(),
4396 }]),
4397 ..Default::default()
4398 },
4399 ],
4400 version: None,
4401 };
4402
4403 lsp_store
4404 .update(cx, |lsp_store, cx| {
4405 lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
4406 })
4407 .unwrap();
4408 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
4409
4410 assert_eq!(
4411 buffer
4412 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
4413 .collect::<Vec<_>>(),
4414 &[
4415 DiagnosticEntry {
4416 range: Point::new(1, 8)..Point::new(1, 9),
4417 diagnostic: Diagnostic {
4418 severity: DiagnosticSeverity::WARNING,
4419 message: "error 1".to_string(),
4420 group_id: 1,
4421 is_primary: true,
4422 ..Default::default()
4423 }
4424 },
4425 DiagnosticEntry {
4426 range: Point::new(1, 8)..Point::new(1, 9),
4427 diagnostic: Diagnostic {
4428 severity: DiagnosticSeverity::HINT,
4429 message: "error 1 hint 1".to_string(),
4430 group_id: 1,
4431 is_primary: false,
4432 ..Default::default()
4433 }
4434 },
4435 DiagnosticEntry {
4436 range: Point::new(1, 13)..Point::new(1, 15),
4437 diagnostic: Diagnostic {
4438 severity: DiagnosticSeverity::HINT,
4439 message: "error 2 hint 1".to_string(),
4440 group_id: 0,
4441 is_primary: false,
4442 ..Default::default()
4443 }
4444 },
4445 DiagnosticEntry {
4446 range: Point::new(1, 13)..Point::new(1, 15),
4447 diagnostic: Diagnostic {
4448 severity: DiagnosticSeverity::HINT,
4449 message: "error 2 hint 2".to_string(),
4450 group_id: 0,
4451 is_primary: false,
4452 ..Default::default()
4453 }
4454 },
4455 DiagnosticEntry {
4456 range: Point::new(2, 8)..Point::new(2, 17),
4457 diagnostic: Diagnostic {
4458 severity: DiagnosticSeverity::ERROR,
4459 message: "error 2".to_string(),
4460 group_id: 0,
4461 is_primary: true,
4462 ..Default::default()
4463 }
4464 }
4465 ]
4466 );
4467
4468 assert_eq!(
4469 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4470 &[
4471 DiagnosticEntry {
4472 range: Point::new(1, 13)..Point::new(1, 15),
4473 diagnostic: Diagnostic {
4474 severity: DiagnosticSeverity::HINT,
4475 message: "error 2 hint 1".to_string(),
4476 group_id: 0,
4477 is_primary: false,
4478 ..Default::default()
4479 }
4480 },
4481 DiagnosticEntry {
4482 range: Point::new(1, 13)..Point::new(1, 15),
4483 diagnostic: Diagnostic {
4484 severity: DiagnosticSeverity::HINT,
4485 message: "error 2 hint 2".to_string(),
4486 group_id: 0,
4487 is_primary: false,
4488 ..Default::default()
4489 }
4490 },
4491 DiagnosticEntry {
4492 range: Point::new(2, 8)..Point::new(2, 17),
4493 diagnostic: Diagnostic {
4494 severity: DiagnosticSeverity::ERROR,
4495 message: "error 2".to_string(),
4496 group_id: 0,
4497 is_primary: true,
4498 ..Default::default()
4499 }
4500 }
4501 ]
4502 );
4503
4504 assert_eq!(
4505 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4506 &[
4507 DiagnosticEntry {
4508 range: Point::new(1, 8)..Point::new(1, 9),
4509 diagnostic: Diagnostic {
4510 severity: DiagnosticSeverity::WARNING,
4511 message: "error 1".to_string(),
4512 group_id: 1,
4513 is_primary: true,
4514 ..Default::default()
4515 }
4516 },
4517 DiagnosticEntry {
4518 range: Point::new(1, 8)..Point::new(1, 9),
4519 diagnostic: Diagnostic {
4520 severity: DiagnosticSeverity::HINT,
4521 message: "error 1 hint 1".to_string(),
4522 group_id: 1,
4523 is_primary: false,
4524 ..Default::default()
4525 }
4526 },
4527 ]
4528 );
4529}
4530
#[gpui::test]
// Verifies that renaming a worktree entry drives the LSP file-operation
// protocol: the server receives a `workspace/willRenameFiles` request (and its
// returned `WorkspaceEdit` is accepted), followed by a
// `workspace/didRenameFiles` notification — both gated on the file-operation
// filters the server registered.
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the fake server registers for rename notifications: all `.rs`
    // files plus all folders. The renamed file below matches the first filter.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    // Advertise both willRename (request) and didRename (notification)
    // capabilities with the filters above.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening the buffer starts the language server for it.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the resulting future is awaited only after the
    // willRenameFiles handler below has been installed and has fired.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server will answer willRenameFiles with; the test only
    // checks that this exact value round-trips back (see the OnceLock below).
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set exactly once inside the request handler; proves the handler ran.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe the old -> new URI pair.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must also receive the
    // didRenameFiles notification with the same URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4659
#[gpui::test]
// Exercises symbol rename end to end: `prepare_rename` resolves the renameable
// range via the server's PrepareRenameRequest, then `perform_rename` applies a
// multi-file `WorkspaceEdit` returned by the server's Rename request, and the
// resulting transaction is checked buffer by buffer.
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // prepare_provider makes the project issue PrepareRenameRequest
                // before the actual rename.
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Step 1: prepare_rename at offset 7 (inside `ONE`); the fake server
    // answers with the full identifier range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Step 2: perform the rename to "THREE"; the server returns edits that
    // touch both one.rs (the definition) and two.rs (two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps edited buffers to their undo transactions; both
    // files must be present, and their text must reflect the applied edits.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4799
#[gpui::test]
// Basic project-wide text search: results are keyed by worktree-relative path
// with byte ranges of each match, and a second search reflects unsaved edits
// made to an open buffer.
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // "TWO" appears in two.rs (its own declaration) and three.rs (a reference).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (not saved to disk) so that it now contains two
    // occurrences of "TWO".
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same query now also matches the dirty buffer's contents, at offsets
    // within the edited (in-memory) text.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40]),
            (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
4876
4877#[gpui::test]
4878async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4879 init_test(cx);
4880
4881 let search_query = "file";
4882
4883 let fs = FakeFs::new(cx.executor());
4884 fs.insert_tree(
4885 path!("/dir"),
4886 json!({
4887 "one.rs": r#"// Rust file one"#,
4888 "one.ts": r#"// TypeScript file one"#,
4889 "two.rs": r#"// Rust file two"#,
4890 "two.ts": r#"// TypeScript file two"#,
4891 }),
4892 )
4893 .await;
4894 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4895
4896 assert!(
4897 search(
4898 &project,
4899 SearchQuery::text(
4900 search_query,
4901 false,
4902 true,
4903 false,
4904 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4905 Default::default(),
4906 false,
4907 None
4908 )
4909 .unwrap(),
4910 cx
4911 )
4912 .await
4913 .unwrap()
4914 .is_empty(),
4915 "If no inclusions match, no files should be returned"
4916 );
4917
4918 assert_eq!(
4919 search(
4920 &project,
4921 SearchQuery::text(
4922 search_query,
4923 false,
4924 true,
4925 false,
4926 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4927 Default::default(),
4928 false,
4929 None
4930 )
4931 .unwrap(),
4932 cx
4933 )
4934 .await
4935 .unwrap(),
4936 HashMap::from_iter([
4937 (separator!("dir/one.rs").to_string(), vec![8..12]),
4938 (separator!("dir/two.rs").to_string(), vec![8..12]),
4939 ]),
4940 "Rust only search should give only Rust files"
4941 );
4942
4943 assert_eq!(
4944 search(
4945 &project,
4946 SearchQuery::text(
4947 search_query,
4948 false,
4949 true,
4950 false,
4951 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4952 Default::default(),
4953 false,
4954 None,
4955 )
4956 .unwrap(),
4957 cx
4958 )
4959 .await
4960 .unwrap(),
4961 HashMap::from_iter([
4962 (separator!("dir/one.ts").to_string(), vec![14..18]),
4963 (separator!("dir/two.ts").to_string(), vec![14..18]),
4964 ]),
4965 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4966 );
4967
4968 assert_eq!(
4969 search(
4970 &project,
4971 SearchQuery::text(
4972 search_query,
4973 false,
4974 true,
4975 false,
4976 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
4977 .unwrap(),
4978 Default::default(),
4979 false,
4980 None,
4981 )
4982 .unwrap(),
4983 cx
4984 )
4985 .await
4986 .unwrap(),
4987 HashMap::from_iter([
4988 (separator!("dir/two.ts").to_string(), vec![14..18]),
4989 (separator!("dir/one.rs").to_string(), vec![8..12]),
4990 (separator!("dir/one.ts").to_string(), vec![14..18]),
4991 (separator!("dir/two.rs").to_string(), vec![8..12]),
4992 ]),
4993 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4994 );
4995}
4996
4997#[gpui::test]
4998async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4999 init_test(cx);
5000
5001 let search_query = "file";
5002
5003 let fs = FakeFs::new(cx.executor());
5004 fs.insert_tree(
5005 path!("/dir"),
5006 json!({
5007 "one.rs": r#"// Rust file one"#,
5008 "one.ts": r#"// TypeScript file one"#,
5009 "two.rs": r#"// Rust file two"#,
5010 "two.ts": r#"// TypeScript file two"#,
5011 }),
5012 )
5013 .await;
5014 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5015
5016 assert_eq!(
5017 search(
5018 &project,
5019 SearchQuery::text(
5020 search_query,
5021 false,
5022 true,
5023 false,
5024 Default::default(),
5025 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5026 false,
5027 None,
5028 )
5029 .unwrap(),
5030 cx
5031 )
5032 .await
5033 .unwrap(),
5034 HashMap::from_iter([
5035 (separator!("dir/one.rs").to_string(), vec![8..12]),
5036 (separator!("dir/one.ts").to_string(), vec![14..18]),
5037 (separator!("dir/two.rs").to_string(), vec![8..12]),
5038 (separator!("dir/two.ts").to_string(), vec![14..18]),
5039 ]),
5040 "If no exclusions match, all files should be returned"
5041 );
5042
5043 assert_eq!(
5044 search(
5045 &project,
5046 SearchQuery::text(
5047 search_query,
5048 false,
5049 true,
5050 false,
5051 Default::default(),
5052 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5053 false,
5054 None,
5055 )
5056 .unwrap(),
5057 cx
5058 )
5059 .await
5060 .unwrap(),
5061 HashMap::from_iter([
5062 (separator!("dir/one.ts").to_string(), vec![14..18]),
5063 (separator!("dir/two.ts").to_string(), vec![14..18]),
5064 ]),
5065 "Rust exclusion search should give only TypeScript files"
5066 );
5067
5068 assert_eq!(
5069 search(
5070 &project,
5071 SearchQuery::text(
5072 search_query,
5073 false,
5074 true,
5075 false,
5076 Default::default(),
5077 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5078 false,
5079 None,
5080 )
5081 .unwrap(),
5082 cx
5083 )
5084 .await
5085 .unwrap(),
5086 HashMap::from_iter([
5087 (separator!("dir/one.rs").to_string(), vec![8..12]),
5088 (separator!("dir/two.rs").to_string(), vec![8..12]),
5089 ]),
5090 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5091 );
5092
5093 assert!(
5094 search(
5095 &project,
5096 SearchQuery::text(
5097 search_query,
5098 false,
5099 true,
5100 false,
5101 Default::default(),
5102 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5103 .unwrap(),
5104 false,
5105 None,
5106 )
5107 .unwrap(),
5108 cx
5109 )
5110 .await
5111 .unwrap()
5112 .is_empty(),
5113 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5114 );
5115}
5116
5117#[gpui::test]
5118async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5119 init_test(cx);
5120
5121 let search_query = "file";
5122
5123 let fs = FakeFs::new(cx.executor());
5124 fs.insert_tree(
5125 path!("/dir"),
5126 json!({
5127 "one.rs": r#"// Rust file one"#,
5128 "one.ts": r#"// TypeScript file one"#,
5129 "two.rs": r#"// Rust file two"#,
5130 "two.ts": r#"// TypeScript file two"#,
5131 }),
5132 )
5133 .await;
5134 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5135
5136 assert!(
5137 search(
5138 &project,
5139 SearchQuery::text(
5140 search_query,
5141 false,
5142 true,
5143 false,
5144 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5145 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5146 false,
5147 None,
5148 )
5149 .unwrap(),
5150 cx
5151 )
5152 .await
5153 .unwrap()
5154 .is_empty(),
5155 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5156 );
5157
5158 assert!(
5159 search(
5160 &project,
5161 SearchQuery::text(
5162 search_query,
5163 false,
5164 true,
5165 false,
5166 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5167 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5168 false,
5169 None,
5170 )
5171 .unwrap(),
5172 cx
5173 )
5174 .await
5175 .unwrap()
5176 .is_empty(),
5177 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5178 );
5179
5180 assert!(
5181 search(
5182 &project,
5183 SearchQuery::text(
5184 search_query,
5185 false,
5186 true,
5187 false,
5188 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5189 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5190 false,
5191 None,
5192 )
5193 .unwrap(),
5194 cx
5195 )
5196 .await
5197 .unwrap()
5198 .is_empty(),
5199 "Non-matching inclusions and exclusions should not change that."
5200 );
5201
5202 assert_eq!(
5203 search(
5204 &project,
5205 SearchQuery::text(
5206 search_query,
5207 false,
5208 true,
5209 false,
5210 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5211 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5212 false,
5213 None,
5214 )
5215 .unwrap(),
5216 cx
5217 )
5218 .await
5219 .unwrap(),
5220 HashMap::from_iter([
5221 (separator!("dir/one.ts").to_string(), vec![14..18]),
5222 (separator!("dir/two.ts").to_string(), vec![14..18]),
5223 ]),
5224 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5225 );
5226}
5227
#[gpui::test]
// With multiple worktrees open, inclusion globs prefixed with a worktree name
// restrict results to that worktree, while bare globs match across all
// worktrees.
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // NOTE(review): the first two queries pass `true` for the flag after the
    // exclusion matcher, while the third passes `false` — presumably this
    // controls matching globs against worktree-prefixed paths; confirm against
    // the `SearchQuery::text` signature.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // A glob without a worktree prefix matches files in both worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
5325
#[gpui::test]
// Search behavior around gitignored directories: ignored files are skipped by
// default, included when the include-ignored flag is set, and inclusion /
// exclusion matchers still apply on top of that.
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    // `target` and `node_modules` are covered by the .gitignore entries.
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default query (fourth flag false): ignored directories are not searched.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project with the fourth flag flipped to true: ignored files are
    // now searched as well.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion and exclusion matchers combine with the include-ignored flag.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5448
#[gpui::test]
// Search over non-ASCII text. Match ranges are byte offsets: "привет" is six
// Cyrillic characters at two UTF-8 bytes each, hence ranges of width 12.
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Case-sensitive query stays a plain text query and matches only the
    // lowercase occurrences.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![17..29]),
            (separator!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive non-ASCII search is lowered to a Regex query (asserted
    // below) and matches both cases.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (separator!("dir/two.rs").to_string(), vec![3..15]),
            (separator!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing '.' must be matched literally, so only two.rs qualifies.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
5532
#[gpui::test]
// Path validation for entry creation: names with trailing dots are allowed,
// but any path containing `..` components (escaping the worktree or not) is
// rejected, both for `create_entry` and for `open_buffer`.
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The worktree root is /one/two/three — a subdirectory of the tree above.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is an ordinary (if odd) file name, not a traversal; creation must
    // succeed and the entry must be included in the worktree.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only "b.." was created on disk; the rejected paths left no trace.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
5602
#[gpui::test]
// Hover aggregation across several language servers attached to one buffer:
// servers advertising hover capability are all queried; servers that return
// `None` contribute nothing; servers without the capability are never asked.
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four servers for the same language: two that answer hovers, one that
    // answers with None, and one with no hover capability at all.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler per server, keyed by server name, so we can
    // later await each expected request exactly once.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two return a hover string containing their own name.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // This one is queried but has nothing to say.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // No hover capability — any hover request here is a test failure.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue one hover and require every capable server to have been asked.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that produced hover content contribute results.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5756
5757#[gpui::test]
5758async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5759 init_test(cx);
5760
5761 let fs = FakeFs::new(cx.executor());
5762 fs.insert_tree(
5763 path!("/dir"),
5764 json!({
5765 "a.ts": "a",
5766 }),
5767 )
5768 .await;
5769
5770 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5771
5772 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5773 language_registry.add(typescript_lang());
5774 let mut fake_language_servers = language_registry.register_fake_lsp(
5775 "TypeScript",
5776 FakeLspAdapter {
5777 capabilities: lsp::ServerCapabilities {
5778 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5779 ..lsp::ServerCapabilities::default()
5780 },
5781 ..FakeLspAdapter::default()
5782 },
5783 );
5784
5785 let (buffer, _handle) = project
5786 .update(cx, |p, cx| {
5787 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5788 })
5789 .await
5790 .unwrap();
5791 cx.executor().run_until_parked();
5792
5793 let fake_server = fake_language_servers
5794 .next()
5795 .await
5796 .expect("failed to get the language server");
5797
5798 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5799 move |_, _| async move {
5800 Ok(Some(lsp::Hover {
5801 contents: lsp::HoverContents::Array(vec![
5802 lsp::MarkedString::String("".to_string()),
5803 lsp::MarkedString::String(" ".to_string()),
5804 lsp::MarkedString::String("\n\n\n".to_string()),
5805 ]),
5806 range: None,
5807 }))
5808 },
5809 );
5810
5811 let hover_task = project.update(cx, |project, cx| {
5812 project.hover(&buffer, Point::new(0, 0), cx)
5813 });
5814 let () = request_handled
5815 .next()
5816 .await
5817 .expect("All hover requests should have been triggered");
5818 assert_eq!(
5819 Vec::<String>::new(),
5820 hover_task
5821 .await
5822 .into_iter()
5823 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5824 .sorted()
5825 .collect::<Vec<_>>(),
5826 "Empty hover parts should be ignored"
5827 );
5828}
5829
5830#[gpui::test]
5831async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5832 init_test(cx);
5833
5834 let fs = FakeFs::new(cx.executor());
5835 fs.insert_tree(
5836 path!("/dir"),
5837 json!({
5838 "a.ts": "a",
5839 }),
5840 )
5841 .await;
5842
5843 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5844
5845 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5846 language_registry.add(typescript_lang());
5847 let mut fake_language_servers = language_registry.register_fake_lsp(
5848 "TypeScript",
5849 FakeLspAdapter {
5850 capabilities: lsp::ServerCapabilities {
5851 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5852 ..lsp::ServerCapabilities::default()
5853 },
5854 ..FakeLspAdapter::default()
5855 },
5856 );
5857
5858 let (buffer, _handle) = project
5859 .update(cx, |p, cx| {
5860 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5861 })
5862 .await
5863 .unwrap();
5864 cx.executor().run_until_parked();
5865
5866 let fake_server = fake_language_servers
5867 .next()
5868 .await
5869 .expect("failed to get the language server");
5870
5871 let mut request_handled = fake_server
5872 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
5873 Ok(Some(vec![
5874 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5875 title: "organize imports".to_string(),
5876 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5877 ..lsp::CodeAction::default()
5878 }),
5879 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5880 title: "fix code".to_string(),
5881 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5882 ..lsp::CodeAction::default()
5883 }),
5884 ]))
5885 });
5886
5887 let code_actions_task = project.update(cx, |project, cx| {
5888 project.code_actions(
5889 &buffer,
5890 0..buffer.read(cx).len(),
5891 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5892 cx,
5893 )
5894 });
5895
5896 let () = request_handled
5897 .next()
5898 .await
5899 .expect("The code action request should have been triggered");
5900
5901 let code_actions = code_actions_task.await.unwrap();
5902 assert_eq!(code_actions.len(), 1);
5903 assert_eq!(
5904 code_actions[0].lsp_action.action_kind(),
5905 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5906 );
5907}
5908
5909#[gpui::test]
5910async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5911 init_test(cx);
5912
5913 let fs = FakeFs::new(cx.executor());
5914 fs.insert_tree(
5915 path!("/dir"),
5916 json!({
5917 "a.tsx": "a",
5918 }),
5919 )
5920 .await;
5921
5922 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5923
5924 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5925 language_registry.add(tsx_lang());
5926 let language_server_names = [
5927 "TypeScriptServer",
5928 "TailwindServer",
5929 "ESLintServer",
5930 "NoActionsCapabilitiesServer",
5931 ];
5932
5933 let mut language_server_rxs = [
5934 language_registry.register_fake_lsp(
5935 "tsx",
5936 FakeLspAdapter {
5937 name: language_server_names[0],
5938 capabilities: lsp::ServerCapabilities {
5939 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5940 ..lsp::ServerCapabilities::default()
5941 },
5942 ..FakeLspAdapter::default()
5943 },
5944 ),
5945 language_registry.register_fake_lsp(
5946 "tsx",
5947 FakeLspAdapter {
5948 name: language_server_names[1],
5949 capabilities: lsp::ServerCapabilities {
5950 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5951 ..lsp::ServerCapabilities::default()
5952 },
5953 ..FakeLspAdapter::default()
5954 },
5955 ),
5956 language_registry.register_fake_lsp(
5957 "tsx",
5958 FakeLspAdapter {
5959 name: language_server_names[2],
5960 capabilities: lsp::ServerCapabilities {
5961 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5962 ..lsp::ServerCapabilities::default()
5963 },
5964 ..FakeLspAdapter::default()
5965 },
5966 ),
5967 language_registry.register_fake_lsp(
5968 "tsx",
5969 FakeLspAdapter {
5970 name: language_server_names[3],
5971 capabilities: lsp::ServerCapabilities {
5972 code_action_provider: None,
5973 ..lsp::ServerCapabilities::default()
5974 },
5975 ..FakeLspAdapter::default()
5976 },
5977 ),
5978 ];
5979
5980 let (buffer, _handle) = project
5981 .update(cx, |p, cx| {
5982 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5983 })
5984 .await
5985 .unwrap();
5986 cx.executor().run_until_parked();
5987
5988 let mut servers_with_actions_requests = HashMap::default();
5989 for i in 0..language_server_names.len() {
5990 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5991 panic!(
5992 "Failed to get language server #{i} with name {}",
5993 &language_server_names[i]
5994 )
5995 });
5996 let new_server_name = new_server.server.name();
5997
5998 assert!(
5999 !servers_with_actions_requests.contains_key(&new_server_name),
6000 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6001 );
6002 match new_server_name.0.as_ref() {
6003 "TailwindServer" | "TypeScriptServer" => {
6004 servers_with_actions_requests.insert(
6005 new_server_name.clone(),
6006 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6007 move |_, _| {
6008 let name = new_server_name.clone();
6009 async move {
6010 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6011 lsp::CodeAction {
6012 title: format!("{name} code action"),
6013 ..lsp::CodeAction::default()
6014 },
6015 )]))
6016 }
6017 },
6018 ),
6019 );
6020 }
6021 "ESLintServer" => {
6022 servers_with_actions_requests.insert(
6023 new_server_name,
6024 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6025 |_, _| async move { Ok(None) },
6026 ),
6027 );
6028 }
6029 "NoActionsCapabilitiesServer" => {
6030 let _never_handled = new_server
6031 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6032 panic!(
6033 "Should not call for code actions server with no corresponding capabilities"
6034 )
6035 });
6036 }
6037 unexpected => panic!("Unexpected server name: {unexpected}"),
6038 }
6039 }
6040
6041 let code_actions_task = project.update(cx, |project, cx| {
6042 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6043 });
6044
6045 // cx.run_until_parked();
6046 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6047 |mut code_actions_request| async move {
6048 code_actions_request
6049 .next()
6050 .await
6051 .expect("All code actions requests should have been triggered")
6052 },
6053 ))
6054 .await;
6055 assert_eq!(
6056 vec!["TailwindServer code action", "TypeScriptServer code action"],
6057 code_actions_task
6058 .await
6059 .unwrap()
6060 .into_iter()
6061 .map(|code_action| code_action.lsp_action.title().to_owned())
6062 .sorted()
6063 .collect::<Vec<_>>(),
6064 "Should receive code actions responses from all related servers with hover capabilities"
6065 );
6066}
6067
6068#[gpui::test]
6069async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6070 init_test(cx);
6071
6072 let fs = FakeFs::new(cx.executor());
6073 fs.insert_tree(
6074 "/dir",
6075 json!({
6076 "a.rs": "let a = 1;",
6077 "b.rs": "let b = 2;",
6078 "c.rs": "let c = 2;",
6079 }),
6080 )
6081 .await;
6082
6083 let project = Project::test(
6084 fs,
6085 [
6086 "/dir/a.rs".as_ref(),
6087 "/dir/b.rs".as_ref(),
6088 "/dir/c.rs".as_ref(),
6089 ],
6090 cx,
6091 )
6092 .await;
6093
6094 // check the initial state and get the worktrees
6095 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6096 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6097 assert_eq!(worktrees.len(), 3);
6098
6099 let worktree_a = worktrees[0].read(cx);
6100 let worktree_b = worktrees[1].read(cx);
6101 let worktree_c = worktrees[2].read(cx);
6102
6103 // check they start in the right order
6104 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6105 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6106 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6107
6108 (
6109 worktrees[0].clone(),
6110 worktrees[1].clone(),
6111 worktrees[2].clone(),
6112 )
6113 });
6114
6115 // move first worktree to after the second
6116 // [a, b, c] -> [b, a, c]
6117 project
6118 .update(cx, |project, cx| {
6119 let first = worktree_a.read(cx);
6120 let second = worktree_b.read(cx);
6121 project.move_worktree(first.id(), second.id(), cx)
6122 })
6123 .expect("moving first after second");
6124
6125 // check the state after moving
6126 project.update(cx, |project, cx| {
6127 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6128 assert_eq!(worktrees.len(), 3);
6129
6130 let first = worktrees[0].read(cx);
6131 let second = worktrees[1].read(cx);
6132 let third = worktrees[2].read(cx);
6133
6134 // check they are now in the right order
6135 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6136 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6137 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6138 });
6139
6140 // move the second worktree to before the first
6141 // [b, a, c] -> [a, b, c]
6142 project
6143 .update(cx, |project, cx| {
6144 let second = worktree_a.read(cx);
6145 let first = worktree_b.read(cx);
6146 project.move_worktree(first.id(), second.id(), cx)
6147 })
6148 .expect("moving second before first");
6149
6150 // check the state after moving
6151 project.update(cx, |project, cx| {
6152 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6153 assert_eq!(worktrees.len(), 3);
6154
6155 let first = worktrees[0].read(cx);
6156 let second = worktrees[1].read(cx);
6157 let third = worktrees[2].read(cx);
6158
6159 // check they are now in the right order
6160 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6161 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6162 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6163 });
6164
6165 // move the second worktree to after the third
6166 // [a, b, c] -> [a, c, b]
6167 project
6168 .update(cx, |project, cx| {
6169 let second = worktree_b.read(cx);
6170 let third = worktree_c.read(cx);
6171 project.move_worktree(second.id(), third.id(), cx)
6172 })
6173 .expect("moving second after third");
6174
6175 // check the state after moving
6176 project.update(cx, |project, cx| {
6177 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6178 assert_eq!(worktrees.len(), 3);
6179
6180 let first = worktrees[0].read(cx);
6181 let second = worktrees[1].read(cx);
6182 let third = worktrees[2].read(cx);
6183
6184 // check they are now in the right order
6185 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6186 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6187 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6188 });
6189
6190 // move the third worktree to before the second
6191 // [a, c, b] -> [a, b, c]
6192 project
6193 .update(cx, |project, cx| {
6194 let third = worktree_c.read(cx);
6195 let second = worktree_b.read(cx);
6196 project.move_worktree(third.id(), second.id(), cx)
6197 })
6198 .expect("moving third before second");
6199
6200 // check the state after moving
6201 project.update(cx, |project, cx| {
6202 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6203 assert_eq!(worktrees.len(), 3);
6204
6205 let first = worktrees[0].read(cx);
6206 let second = worktrees[1].read(cx);
6207 let third = worktrees[2].read(cx);
6208
6209 // check they are now in the right order
6210 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6211 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6212 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6213 });
6214
6215 // move the first worktree to after the third
6216 // [a, b, c] -> [b, c, a]
6217 project
6218 .update(cx, |project, cx| {
6219 let first = worktree_a.read(cx);
6220 let third = worktree_c.read(cx);
6221 project.move_worktree(first.id(), third.id(), cx)
6222 })
6223 .expect("moving first after third");
6224
6225 // check the state after moving
6226 project.update(cx, |project, cx| {
6227 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6228 assert_eq!(worktrees.len(), 3);
6229
6230 let first = worktrees[0].read(cx);
6231 let second = worktrees[1].read(cx);
6232 let third = worktrees[2].read(cx);
6233
6234 // check they are now in the right order
6235 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6236 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6237 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6238 });
6239
6240 // move the third worktree to before the first
6241 // [b, c, a] -> [a, b, c]
6242 project
6243 .update(cx, |project, cx| {
6244 let third = worktree_a.read(cx);
6245 let first = worktree_b.read(cx);
6246 project.move_worktree(third.id(), first.id(), cx)
6247 })
6248 .expect("moving third before first");
6249
6250 // check the state after moving
6251 project.update(cx, |project, cx| {
6252 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6253 assert_eq!(worktrees.len(), 3);
6254
6255 let first = worktrees[0].read(cx);
6256 let second = worktrees[1].read(cx);
6257 let third = worktrees[2].read(cx);
6258
6259 // check they are now in the right order
6260 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6261 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6262 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6263 });
6264}
6265
6266#[gpui::test]
6267async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
6268 init_test(cx);
6269
6270 let staged_contents = r#"
6271 fn main() {
6272 println!("hello world");
6273 }
6274 "#
6275 .unindent();
6276 let file_contents = r#"
6277 // print goodbye
6278 fn main() {
6279 println!("goodbye world");
6280 }
6281 "#
6282 .unindent();
6283
6284 let fs = FakeFs::new(cx.background_executor.clone());
6285 fs.insert_tree(
6286 "/dir",
6287 json!({
6288 ".git": {},
6289 "src": {
6290 "main.rs": file_contents,
6291 }
6292 }),
6293 )
6294 .await;
6295
6296 fs.set_index_for_repo(
6297 Path::new("/dir/.git"),
6298 &[("src/main.rs".into(), staged_contents)],
6299 );
6300
6301 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6302
6303 let buffer = project
6304 .update(cx, |project, cx| {
6305 project.open_local_buffer("/dir/src/main.rs", cx)
6306 })
6307 .await
6308 .unwrap();
6309 let unstaged_diff = project
6310 .update(cx, |project, cx| {
6311 project.open_unstaged_diff(buffer.clone(), cx)
6312 })
6313 .await
6314 .unwrap();
6315
6316 cx.run_until_parked();
6317 unstaged_diff.update(cx, |unstaged_diff, cx| {
6318 let snapshot = buffer.read(cx).snapshot();
6319 assert_hunks(
6320 unstaged_diff.hunks(&snapshot, cx),
6321 &snapshot,
6322 &unstaged_diff.base_text_string().unwrap(),
6323 &[
6324 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
6325 (
6326 2..3,
6327 " println!(\"hello world\");\n",
6328 " println!(\"goodbye world\");\n",
6329 DiffHunkStatus::modified_none(),
6330 ),
6331 ],
6332 );
6333 });
6334
6335 let staged_contents = r#"
6336 // print goodbye
6337 fn main() {
6338 }
6339 "#
6340 .unindent();
6341
6342 fs.set_index_for_repo(
6343 Path::new("/dir/.git"),
6344 &[("src/main.rs".into(), staged_contents)],
6345 );
6346
6347 cx.run_until_parked();
6348 unstaged_diff.update(cx, |unstaged_diff, cx| {
6349 let snapshot = buffer.read(cx).snapshot();
6350 assert_hunks(
6351 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
6352 &snapshot,
6353 &unstaged_diff.base_text().text(),
6354 &[(
6355 2..3,
6356 "",
6357 " println!(\"goodbye world\");\n",
6358 DiffHunkStatus::added_none(),
6359 )],
6360 );
6361 });
6362}
6363
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: HEAD, index, and working copy, each
    // slightly different so both staged and unstaged changes exist.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index, but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should inherit the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // Relative to HEAD: the comment line is added (with a secondary hunk,
    // i.e. also differing from the index), the println change is modified.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows up as a single deletion hunk, not yet staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // With the deletion staged, the hunk loses its secondary (unstaged) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6541
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Committed vs working copy: one deleted line ("zero") and two modified
    // lines ("two" -> "TWO", "four" -> "FOUR"), giving three distinct hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so every hunk begins unstaged.
    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Capture diff events so change notifications can be asserted on below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged
    // (SecondaryHunkRemovalPending) before the index write finishes.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It goes through the same optimistic pending state.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6881
6882#[gpui::test(seeds(340, 472))]
6883async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
6884 use DiffHunkSecondaryStatus::*;
6885 init_test(cx);
6886
6887 let committed_contents = r#"
6888 zero
6889 one
6890 two
6891 three
6892 four
6893 five
6894 "#
6895 .unindent();
6896 let file_contents = r#"
6897 one
6898 TWO
6899 three
6900 FOUR
6901 five
6902 "#
6903 .unindent();
6904
6905 let fs = FakeFs::new(cx.background_executor.clone());
6906 fs.insert_tree(
6907 "/dir",
6908 json!({
6909 ".git": {},
6910 "file.txt": file_contents.clone()
6911 }),
6912 )
6913 .await;
6914
6915 fs.set_head_for_repo(
6916 "/dir/.git".as_ref(),
6917 &[("file.txt".into(), committed_contents.clone())],
6918 );
6919 fs.set_index_for_repo(
6920 "/dir/.git".as_ref(),
6921 &[("file.txt".into(), committed_contents.clone())],
6922 );
6923
6924 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6925
6926 let buffer = project
6927 .update(cx, |project, cx| {
6928 project.open_local_buffer("/dir/file.txt", cx)
6929 })
6930 .await
6931 .unwrap();
6932 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6933 let uncommitted_diff = project
6934 .update(cx, |project, cx| {
6935 project.open_uncommitted_diff(buffer.clone(), cx)
6936 })
6937 .await
6938 .unwrap();
6939
6940 // The hunks are initially unstaged.
6941 uncommitted_diff.read_with(cx, |diff, cx| {
6942 assert_hunks(
6943 diff.hunks(&snapshot, cx),
6944 &snapshot,
6945 &diff.base_text_string().unwrap(),
6946 &[
6947 (
6948 0..0,
6949 "zero\n",
6950 "",
6951 DiffHunkStatus::deleted(HasSecondaryHunk),
6952 ),
6953 (
6954 1..2,
6955 "two\n",
6956 "TWO\n",
6957 DiffHunkStatus::modified(HasSecondaryHunk),
6958 ),
6959 (
6960 3..4,
6961 "four\n",
6962 "FOUR\n",
6963 DiffHunkStatus::modified(HasSecondaryHunk),
6964 ),
6965 ],
6966 );
6967 });
6968
6969 // Pause IO events
6970 fs.pause_events();
6971
6972 // Stage the first hunk.
6973 uncommitted_diff.update(cx, |diff, cx| {
6974 let hunk = diff.hunks(&snapshot, cx).next().unwrap();
6975 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
6976 assert_hunks(
6977 diff.hunks(&snapshot, cx),
6978 &snapshot,
6979 &diff.base_text_string().unwrap(),
6980 &[
6981 (
6982 0..0,
6983 "zero\n",
6984 "",
6985 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
6986 ),
6987 (
6988 1..2,
6989 "two\n",
6990 "TWO\n",
6991 DiffHunkStatus::modified(HasSecondaryHunk),
6992 ),
6993 (
6994 3..4,
6995 "four\n",
6996 "FOUR\n",
6997 DiffHunkStatus::modified(HasSecondaryHunk),
6998 ),
6999 ],
7000 );
7001 });
7002
7003 // Stage the second hunk *before* receiving the FS event for the first hunk.
7004 cx.run_until_parked();
7005 uncommitted_diff.update(cx, |diff, cx| {
7006 let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
7007 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
7008 assert_hunks(
7009 diff.hunks(&snapshot, cx),
7010 &snapshot,
7011 &diff.base_text_string().unwrap(),
7012 &[
7013 (
7014 0..0,
7015 "zero\n",
7016 "",
7017 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7018 ),
7019 (
7020 1..2,
7021 "two\n",
7022 "TWO\n",
7023 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7024 ),
7025 (
7026 3..4,
7027 "four\n",
7028 "FOUR\n",
7029 DiffHunkStatus::modified(HasSecondaryHunk),
7030 ),
7031 ],
7032 );
7033 });
7034
7035 // Process the FS event for staging the first hunk (second event is still pending).
7036 fs.flush_events(1);
7037 cx.run_until_parked();
7038
7039 // Stage the third hunk before receiving the second FS event.
7040 uncommitted_diff.update(cx, |diff, cx| {
7041 let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
7042 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
7043 });
7044
7045 // Wait for all remaining IO.
7046 cx.run_until_parked();
7047 fs.flush_events(fs.buffered_event_count());
7048
7049 // Now all hunks are staged.
7050 cx.run_until_parked();
7051 uncommitted_diff.update(cx, |diff, cx| {
7052 assert_hunks(
7053 diff.hunks(&snapshot, cx),
7054 &snapshot,
7055 &diff.base_text_string().unwrap(),
7056 &[
7057 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
7058 (
7059 1..2,
7060 "two\n",
7061 "TWO\n",
7062 DiffHunkStatus::modified(NoSecondaryHunk),
7063 ),
7064 (
7065 3..4,
7066 "four\n",
7067 "FOUR\n",
7068 DiffHunkStatus::modified(NoSecondaryHunk),
7069 ),
7070 ],
7071 );
7072 });
7073}
7074
// Randomized stress test: stage and unstage hunks in random order, possibly
// while earlier index writes are still in flight, and check that the diff's
// secondary (index) hunk statuses settle to the expected values.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of stage/unstage operations; overridable via the `OPERATIONS` env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.gen_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index are identical; every fifth buffer line is modified,
    // which yields six distinct hunks (asserted below).
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    // Toggle random hunks, mirroring the expected pending status in our local
    // copy of the hunks.
    for _i in 0..operations {
        let hunk_ix = rng.gen_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Let background work (diff recalculation, index writes) interleave.
        for _ in 0..rng.gen_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // All index writes have completed, so pending statuses resolve.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    // The diff's view of each hunk must match the locally tracked expectation.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7192
7193#[gpui::test]
7194async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7195 init_test(cx);
7196
7197 let committed_contents = r#"
7198 fn main() {
7199 println!("hello from HEAD");
7200 }
7201 "#
7202 .unindent();
7203 let file_contents = r#"
7204 fn main() {
7205 println!("hello from the working copy");
7206 }
7207 "#
7208 .unindent();
7209
7210 let fs = FakeFs::new(cx.background_executor.clone());
7211 fs.insert_tree(
7212 "/dir",
7213 json!({
7214 ".git": {},
7215 "src": {
7216 "main.rs": file_contents,
7217 }
7218 }),
7219 )
7220 .await;
7221
7222 fs.set_head_for_repo(
7223 Path::new("/dir/.git"),
7224 &[("src/main.rs".into(), committed_contents.clone())],
7225 );
7226 fs.set_index_for_repo(
7227 Path::new("/dir/.git"),
7228 &[("src/main.rs".into(), committed_contents.clone())],
7229 );
7230
7231 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7232
7233 let buffer = project
7234 .update(cx, |project, cx| {
7235 project.open_local_buffer("/dir/src/main.rs", cx)
7236 })
7237 .await
7238 .unwrap();
7239 let uncommitted_diff = project
7240 .update(cx, |project, cx| {
7241 project.open_uncommitted_diff(buffer.clone(), cx)
7242 })
7243 .await
7244 .unwrap();
7245
7246 cx.run_until_parked();
7247 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7248 let snapshot = buffer.read(cx).snapshot();
7249 assert_hunks(
7250 uncommitted_diff.hunks(&snapshot, cx),
7251 &snapshot,
7252 &uncommitted_diff.base_text_string().unwrap(),
7253 &[(
7254 1..2,
7255 " println!(\"hello from HEAD\");\n",
7256 " println!(\"hello from the working copy\");\n",
7257 DiffHunkStatus {
7258 kind: DiffHunkStatusKind::Modified,
7259 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7260 },
7261 )],
7262 );
7263 });
7264}
7265
7266#[gpui::test]
7267async fn test_repository_and_path_for_project_path(
7268 background_executor: BackgroundExecutor,
7269 cx: &mut gpui::TestAppContext,
7270) {
7271 init_test(cx);
7272 let fs = FakeFs::new(background_executor);
7273 fs.insert_tree(
7274 path!("/root"),
7275 json!({
7276 "c.txt": "",
7277 "dir1": {
7278 ".git": {},
7279 "deps": {
7280 "dep1": {
7281 ".git": {},
7282 "src": {
7283 "a.txt": ""
7284 }
7285 }
7286 },
7287 "src": {
7288 "b.txt": ""
7289 }
7290 },
7291 }),
7292 )
7293 .await;
7294
7295 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7296 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7297 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7298 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7299 .await;
7300 cx.run_until_parked();
7301
7302 project.read_with(cx, |project, cx| {
7303 let git_store = project.git_store().read(cx);
7304 let pairs = [
7305 ("c.txt", None),
7306 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
7307 (
7308 "dir1/deps/dep1/src/a.txt",
7309 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
7310 ),
7311 ];
7312 let expected = pairs
7313 .iter()
7314 .map(|(path, result)| {
7315 (
7316 path,
7317 result.map(|(repo, repo_path)| {
7318 (Path::new(repo).into(), RepoPath::from(repo_path))
7319 }),
7320 )
7321 })
7322 .collect::<Vec<_>>();
7323 let actual = pairs
7324 .iter()
7325 .map(|(path, _)| {
7326 let project_path = (tree_id, Path::new(path)).into();
7327 let result = maybe!({
7328 let (repo, repo_path) =
7329 git_store.repository_and_path_for_project_path(&project_path, cx)?;
7330 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
7331 });
7332 (path, result)
7333 })
7334 .collect::<Vec<_>>();
7335 pretty_assertions::assert_eq!(expected, actual);
7336 });
7337
7338 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
7339 .await
7340 .unwrap();
7341 cx.run_until_parked();
7342
7343 project.read_with(cx, |project, cx| {
7344 let git_store = project.git_store().read(cx);
7345 assert_eq!(
7346 git_store.repository_and_path_for_project_path(
7347 &(tree_id, Path::new("dir1/src/b.txt")).into(),
7348 cx
7349 ),
7350 None
7351 );
7352 });
7353}
7354
7355#[gpui::test]
7356async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7357 init_test(cx);
7358 let fs = FakeFs::new(cx.background_executor.clone());
7359 fs.insert_tree(
7360 path!("/root"),
7361 json!({
7362 "home": {
7363 ".git": {},
7364 "project": {
7365 "a.txt": "A"
7366 },
7367 },
7368 }),
7369 )
7370 .await;
7371 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7372
7373 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7374 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7375 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7376 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7377 .await;
7378 tree.flush_fs_events(cx).await;
7379
7380 project.read_with(cx, |project, cx| {
7381 let containing = project
7382 .git_store()
7383 .read(cx)
7384 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7385 assert!(containing.is_none());
7386 });
7387
7388 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7389 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7390 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7391 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7392 .await;
7393 tree.flush_fs_events(cx).await;
7394
7395 project.read_with(cx, |project, cx| {
7396 let containing = project
7397 .git_store()
7398 .read(cx)
7399 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7400 assert_eq!(
7401 containing
7402 .unwrap()
7403 .0
7404 .read(cx)
7405 .work_directory_abs_path
7406 .as_ref(),
7407 Path::new(path!("/root/home"))
7408 );
7409 });
7410}
7411
// End-to-end check (real filesystem + real git) that `cached_status` tracks
// working-copy edits, commits, and deletions as they happen.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a tracked file that was previously unchanged.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // c.txt now appears as modified alongside the earlier entries.
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modifications and the deletion.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked (a.txt) and one untracked (b.txt) file on disk.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7537
7538#[gpui::test]
7539async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
7540 init_test(cx);
7541 cx.executor().allow_parking();
7542
7543 let root = TempTree::new(json!({
7544 "project": {
7545 "sub": {},
7546 "a.txt": "",
7547 },
7548 }));
7549
7550 let work_dir = root.path().join("project");
7551 let repo = git_init(work_dir.as_path());
7552 // a.txt exists in HEAD and the working copy but is deleted in the index.
7553 git_add("a.txt", &repo);
7554 git_commit("Initial commit", &repo);
7555 git_remove_index("a.txt".as_ref(), &repo);
7556 // `sub` is a nested git repository.
7557 let _sub = git_init(&work_dir.join("sub"));
7558
7559 let project = Project::test(
7560 Arc::new(RealFs::new(None, cx.executor())),
7561 [root.path()],
7562 cx,
7563 )
7564 .await;
7565
7566 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7567 tree.flush_fs_events(cx).await;
7568 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7569 .await;
7570 cx.executor().run_until_parked();
7571
7572 let repository = project.read_with(cx, |project, cx| {
7573 project
7574 .repositories(cx)
7575 .values()
7576 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
7577 .unwrap()
7578 .clone()
7579 });
7580
7581 repository.read_with(cx, |repository, _cx| {
7582 let entries = repository.cached_status().collect::<Vec<_>>();
7583
7584 // `sub` doesn't appear in our computed statuses.
7585 // a.txt appears with a combined `DA` status.
7586 assert_eq!(
7587 entries,
7588 [StatusEntry {
7589 repo_path: "a.txt".into(),
7590 status: TrackedStatus {
7591 index_status: StatusCode::Deleted,
7592 worktree_status: StatusCode::Added
7593 }
7594 .into(),
7595 }]
7596 )
7597 });
7598}
7599
// Open a project rooted at a subfolder of a git repository: the discovered
// repository's work directory is the repo root (above the worktree root), and
// statuses for files inside the worktree are still reported.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the two files inside the opened worktree.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT.as_ref(), FileStatus::Untracked)],
    );

    // Open the project two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The work directory is the repo root, not the worktree root.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            FileStatus::Untracked
        );
    });

    // Clear all statuses; the previously untracked file loses its status too.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(repository.status_for_path(&E_TXT.into()), None);
    });
}
7676
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE: `#[cfg(any())]` is always false, so this test is currently compiled out.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Commit a conflicting change on another branch, then cherry-pick it onto
    // main so that a.txt ends up conflicted.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();
    // The repository should report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Resolving the cherry-pick clears the conflict set.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
7757
// Changing `.gitignore` updates both the worktree's ignored flags and the
// repository status of newly (un)ignored files.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index contain the gitignore and the xml file, but not b.txt.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Now the xml file is ignored, and the text file is staged as Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
7824
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
//
// Renaming a repository's work directory on disk: the repository follows the
// rename, and file statuses are preserved under the new location.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    // Real repo on disk: one committed-then-modified file `a` and one
    // untracked file `b`.
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: repo rooted at project1, `a` modified, `b` untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the repository's work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository now reports the new work directory; statuses are intact.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
7904
7905// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
7906// you can't rename a directory which some program has already open. This is a
7907// limitation of the Windows. See:
7908// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
7909#[gpui::test]
7910#[cfg_attr(target_os = "windows", ignore)]
7911async fn test_file_status(cx: &mut gpui::TestAppContext) {
7912 init_test(cx);
7913 cx.executor().allow_parking();
7914 const IGNORE_RULE: &str = "**/target";
7915
7916 let root = TempTree::new(json!({
7917 "project": {
7918 "a.txt": "a",
7919 "b.txt": "bb",
7920 "c": {
7921 "d": {
7922 "e.txt": "eee"
7923 }
7924 },
7925 "f.txt": "ffff",
7926 "target": {
7927 "build_file": "???"
7928 },
7929 ".gitignore": IGNORE_RULE
7930 },
7931
7932 }));
7933 let root_path = root.path();
7934
7935 const A_TXT: &str = "a.txt";
7936 const B_TXT: &str = "b.txt";
7937 const E_TXT: &str = "c/d/e.txt";
7938 const F_TXT: &str = "f.txt";
7939 const DOTGITIGNORE: &str = ".gitignore";
7940 const BUILD_FILE: &str = "target/build_file";
7941
7942 // Set up git repository before creating the worktree.
7943 let work_dir = root.path().join("project");
7944 let mut repo = git_init(work_dir.as_path());
7945 repo.add_ignore_rule(IGNORE_RULE).unwrap();
7946 git_add(A_TXT, &repo);
7947 git_add(E_TXT, &repo);
7948 git_add(DOTGITIGNORE, &repo);
7949 git_commit("Initial commit", &repo);
7950
7951 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
7952
7953 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7954 tree.flush_fs_events(cx).await;
7955 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7956 .await;
7957 cx.executor().run_until_parked();
7958
7959 let repository = project.read_with(cx, |project, cx| {
7960 project.repositories(cx).values().next().unwrap().clone()
7961 });
7962
7963 // Check that the right git state is observed on startup
7964 repository.read_with(cx, |repository, _cx| {
7965 assert_eq!(
7966 repository.work_directory_abs_path.as_ref(),
7967 root_path.join("project").as_path()
7968 );
7969
7970 assert_eq!(
7971 repository.status_for_path(&B_TXT.into()).unwrap().status,
7972 FileStatus::Untracked,
7973 );
7974 assert_eq!(
7975 repository.status_for_path(&F_TXT.into()).unwrap().status,
7976 FileStatus::Untracked,
7977 );
7978 });
7979
7980 // Modify a file in the working copy.
7981 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
7982 tree.flush_fs_events(cx).await;
7983 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7984 .await;
7985 cx.executor().run_until_parked();
7986
7987 // The worktree detects that the file's git status has changed.
7988 repository.read_with(cx, |repository, _| {
7989 assert_eq!(
7990 repository.status_for_path(&A_TXT.into()).unwrap().status,
7991 StatusCode::Modified.worktree(),
7992 );
7993 });
7994
7995 // Create a commit in the git repository.
7996 git_add(A_TXT, &repo);
7997 git_add(B_TXT, &repo);
7998 git_commit("Committing modified and added", &repo);
7999 tree.flush_fs_events(cx).await;
8000 cx.executor().run_until_parked();
8001
8002 // The worktree detects that the files' git status have changed.
8003 repository.read_with(cx, |repository, _cx| {
8004 assert_eq!(
8005 repository.status_for_path(&F_TXT.into()).unwrap().status,
8006 FileStatus::Untracked,
8007 );
8008 assert_eq!(repository.status_for_path(&B_TXT.into()), None);
8009 assert_eq!(repository.status_for_path(&A_TXT.into()), None);
8010 });
8011
8012 // Modify files in the working copy and perform git operations on other files.
8013 git_reset(0, &repo);
8014 git_remove_index(Path::new(B_TXT), &repo);
8015 git_stash(&mut repo);
8016 std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
8017 std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
8018 tree.flush_fs_events(cx).await;
8019 cx.executor().run_until_parked();
8020
8021 // Check that more complex repo changes are tracked
8022 repository.read_with(cx, |repository, _cx| {
8023 assert_eq!(repository.status_for_path(&A_TXT.into()), None);
8024 assert_eq!(
8025 repository.status_for_path(&B_TXT.into()).unwrap().status,
8026 FileStatus::Untracked,
8027 );
8028 assert_eq!(
8029 repository.status_for_path(&E_TXT.into()).unwrap().status,
8030 StatusCode::Modified.worktree(),
8031 );
8032 });
8033
8034 std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
8035 std::fs::remove_dir_all(work_dir.join("c")).unwrap();
8036 std::fs::write(
8037 work_dir.join(DOTGITIGNORE),
8038 [IGNORE_RULE, "f.txt"].join("\n"),
8039 )
8040 .unwrap();
8041
8042 git_add(Path::new(DOTGITIGNORE), &repo);
8043 git_commit("Committing modified git ignore", &repo);
8044
8045 tree.flush_fs_events(cx).await;
8046 cx.executor().run_until_parked();
8047
8048 let mut renamed_dir_name = "first_directory/second_directory";
8049 const RENAMED_FILE: &str = "rf.txt";
8050
8051 std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
8052 std::fs::write(
8053 work_dir.join(renamed_dir_name).join(RENAMED_FILE),
8054 "new-contents",
8055 )
8056 .unwrap();
8057
8058 tree.flush_fs_events(cx).await;
8059 cx.executor().run_until_parked();
8060
8061 repository.read_with(cx, |repository, _cx| {
8062 assert_eq!(
8063 repository
8064 .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
8065 .unwrap()
8066 .status,
8067 FileStatus::Untracked,
8068 );
8069 });
8070
8071 renamed_dir_name = "new_first_directory/second_directory";
8072
8073 std::fs::rename(
8074 work_dir.join("first_directory"),
8075 work_dir.join("new_first_directory"),
8076 )
8077 .unwrap();
8078
8079 tree.flush_fs_events(cx).await;
8080 cx.executor().run_until_parked();
8081
8082 repository.read_with(cx, |repository, _cx| {
8083 assert_eq!(
8084 repository
8085 .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
8086 .unwrap()
8087 .status,
8088 FileStatus::Untracked,
8089 );
8090 });
8091}
8092
// Repositories reachable only through an invisible (non-project) worktree must
// not leak into the project's repository list.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // Two nested repositories: /root/dir1 and /root/dir1/dep1. Only dep1 is
    // opened as the project's visible worktree.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    visible_worktree
        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;

    // Only dep1's repository is discovered from the visible worktree.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Open a single file from the outer repository, which creates an invisible
    // worktree rooted at that file.
    let (invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    invisible_worktree
        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;

    // The invisible worktree must not cause /root/dir1's repository to be
    // added to the project.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8154
// Verifies that git status and ignore state stay correct across rescans:
// tracked files report index status, ancestor-ignored and dir-ignored files
// report no status but the correct `is_ignored` flag.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so ignored entries are still scanned and
    // can be asserted on below.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // An ancestor .gitignore above the repo root plus a repo-local one.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded so they can be
    // inspected below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: tracked file is clean, ignored files carry no status.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Add a second file of each kind: a newly staged file, an
    // ancestor-ignored file, and a file inside the ignored dir.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    // The staged file shows as Added; ignored files still carry no status.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8289
// Repositories reached through `gitdir:` files — linked git worktrees and
// submodules — must be discovered, and git events in their (shared) git dirs
// must refresh status for the right repository.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        // Linked worktree's private dir points back at the
                        // shared .git via "commondir".
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                // Linked worktree: .git is a file pointing into .git/worktrees.
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    // Submodule: .git file pointing into .git/modules.
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| {
        project
            .worktrees(cx)
            .next()
            .unwrap()
            .read(cx)
            .as_local()
            .unwrap()
            .scan_complete()
    });
    scan_complete.await;

    // All three repositories — main, linked worktree, submodule — are found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repo, not the outer one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // On-disk content "B" differs from HEAD/index content "b".
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
8448
// Two project worktrees living inside the same git repository must resolve to
// a single deduplicated repository entry.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    // One repo at /root/project containing both child directories.
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children as separate worktrees of one project.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository is reported, rooted at the shared parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
8494
8495async fn search(
8496 project: &Entity<Project>,
8497 query: SearchQuery,
8498 cx: &mut gpui::TestAppContext,
8499) -> Result<HashMap<String, Vec<Range<usize>>>> {
8500 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8501 let mut results = HashMap::default();
8502 while let Ok(search_result) = search_rx.recv().await {
8503 match search_result {
8504 SearchResult::Buffer { buffer, ranges } => {
8505 results.entry(buffer).or_insert(ranges);
8506 }
8507 SearchResult::LimitReached => {}
8508 }
8509 }
8510 Ok(results
8511 .into_iter()
8512 .map(|(buffer, ranges)| {
8513 buffer.update(cx, |buffer, cx| {
8514 let path = buffer
8515 .file()
8516 .unwrap()
8517 .full_path(cx)
8518 .to_string_lossy()
8519 .to_string();
8520 let ranges = ranges
8521 .into_iter()
8522 .map(|range| range.to_offset(buffer))
8523 .collect::<Vec<_>>();
8524 (path, ranges)
8525 })
8526 })
8527 .collect())
8528}
8529
/// Shared test setup: installs a test `SettingsStore` global and initializes
/// the release-channel, language, and project settings subsystems.
/// Initialization order matters: the settings store must be registered first.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
8541
8542fn json_lang() -> Arc<Language> {
8543 Arc::new(Language::new(
8544 LanguageConfig {
8545 name: "JSON".into(),
8546 matcher: LanguageMatcher {
8547 path_suffixes: vec!["json".to_string()],
8548 ..Default::default()
8549 },
8550 ..Default::default()
8551 },
8552 None,
8553 ))
8554}
8555
8556fn js_lang() -> Arc<Language> {
8557 Arc::new(Language::new(
8558 LanguageConfig {
8559 name: "JavaScript".into(),
8560 matcher: LanguageMatcher {
8561 path_suffixes: vec!["js".to_string()],
8562 ..Default::default()
8563 },
8564 ..Default::default()
8565 },
8566 None,
8567 ))
8568}
8569
8570fn rust_lang() -> Arc<Language> {
8571 Arc::new(Language::new(
8572 LanguageConfig {
8573 name: "Rust".into(),
8574 matcher: LanguageMatcher {
8575 path_suffixes: vec!["rs".to_string()],
8576 ..Default::default()
8577 },
8578 ..Default::default()
8579 },
8580 Some(tree_sitter_rust::LANGUAGE.into()),
8581 ))
8582}
8583
8584fn typescript_lang() -> Arc<Language> {
8585 Arc::new(Language::new(
8586 LanguageConfig {
8587 name: "TypeScript".into(),
8588 matcher: LanguageMatcher {
8589 path_suffixes: vec!["ts".to_string()],
8590 ..Default::default()
8591 },
8592 ..Default::default()
8593 },
8594 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8595 ))
8596}
8597
8598fn tsx_lang() -> Arc<Language> {
8599 Arc::new(Language::new(
8600 LanguageConfig {
8601 name: "tsx".into(),
8602 matcher: LanguageMatcher {
8603 path_suffixes: vec!["tsx".to_string()],
8604 ..Default::default()
8605 },
8606 ..Default::default()
8607 },
8608 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8609 ))
8610}
8611
8612fn get_all_tasks(
8613 project: &Entity<Project>,
8614 task_contexts: &TaskContexts,
8615 cx: &mut App,
8616) -> Vec<(TaskSourceKind, ResolvedTask)> {
8617 let (mut old, new) = project.update(cx, |project, cx| {
8618 project
8619 .task_store
8620 .read(cx)
8621 .task_inventory()
8622 .unwrap()
8623 .read(cx)
8624 .used_and_current_resolved_tasks(task_contexts, cx)
8625 });
8626 old.extend(new);
8627 old
8628}
8629
8630#[track_caller]
8631fn assert_entry_git_state(
8632 tree: &Worktree,
8633 repository: &Repository,
8634 path: &str,
8635 index_status: Option<StatusCode>,
8636 is_ignored: bool,
8637) {
8638 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
8639 let entry = tree
8640 .entry_for_path(path)
8641 .unwrap_or_else(|| panic!("entry {path} not found"));
8642 let status = repository
8643 .status_for_path(&path.into())
8644 .map(|entry| entry.status);
8645 let expected = index_status.map(|index_status| {
8646 TrackedStatus {
8647 index_status,
8648 worktree_status: StatusCode::Unmodified,
8649 }
8650 .into()
8651 });
8652 assert_eq!(
8653 status, expected,
8654 "expected {path} to have git status: {expected:?}"
8655 );
8656 assert_eq!(
8657 entry.is_ignored, is_ignored,
8658 "expected {path} to have is_ignored: {is_ignored}"
8659 );
8660}
8661
8662#[track_caller]
8663fn git_init(path: &Path) -> git2::Repository {
8664 let mut init_opts = RepositoryInitOptions::new();
8665 init_opts.initial_head("main");
8666 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
8667}
8668
8669#[track_caller]
8670fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
8671 let path = path.as_ref();
8672 let mut index = repo.index().expect("Failed to get index");
8673 index.add_path(path).expect("Failed to add file");
8674 index.write().expect("Failed to write index");
8675}
8676
8677#[track_caller]
8678fn git_remove_index(path: &Path, repo: &git2::Repository) {
8679 let mut index = repo.index().expect("Failed to get index");
8680 index.remove_path(path).expect("Failed to add file");
8681 index.write().expect("Failed to write index");
8682}
8683
8684#[track_caller]
8685fn git_commit(msg: &'static str, repo: &git2::Repository) {
8686 use git2::Signature;
8687
8688 let signature = Signature::now("test", "test@zed.dev").unwrap();
8689 let oid = repo.index().unwrap().write_tree().unwrap();
8690 let tree = repo.find_tree(oid).unwrap();
8691 if let Ok(head) = repo.head() {
8692 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
8693
8694 let parent_commit = parent_obj.as_commit().unwrap();
8695
8696 repo.commit(
8697 Some("HEAD"),
8698 &signature,
8699 &signature,
8700 msg,
8701 &tree,
8702 &[parent_commit],
8703 )
8704 .expect("Failed to commit with parent");
8705 } else {
8706 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
8707 .expect("Failed to commit");
8708 }
8709}
8710
// Cherry-picks `commit` onto the current HEAD. Compiled out (`cfg(any())` is
// always false); kept around for ad-hoc debugging of git tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
8716
/// Stashes all local modifications in `repo` under a fixed test signature.
/// Takes `&mut` because `git2::Repository::stash_save` requires it.
#[track_caller]
fn git_stash(repo: &mut git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    repo.stash_save(&signature, "N/A", None)
        .expect("Failed to stash");
}
8725
8726#[track_caller]
8727fn git_reset(offset: usize, repo: &git2::Repository) {
8728 let head = repo.head().expect("Couldn't get repo head");
8729 let object = head.peel(git2::ObjectType::Commit).unwrap();
8730 let commit = object.as_commit().unwrap();
8731 let new_head = commit
8732 .parents()
8733 .inspect(|parnet| {
8734 parnet.message();
8735 })
8736 .nth(offset)
8737 .expect("Not enough history");
8738 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
8739 .expect("Could not reset");
8740}
8741
// Creates branch `name` pointing at the current HEAD commit. Compiled out
// (`cfg(any())` is always false); kept around for ad-hoc debugging.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed: the expect message previously said "Failed to commit",
    // copy-pasted from git_commit.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
8752
// Points HEAD at the ref `name` and checks it out into the working tree.
// Compiled out (`cfg(any())` is always false); kept for ad-hoc debugging.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
8759
// Snapshots `repo`'s full status as a path -> git2::Status map. Compiled out
// (`cfg(any())` is always false); kept for ad-hoc debugging.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}
8769
// `Project::find_project_path` must resolve absolute paths to the correct
// (worktree, relative path) pair across multiple worktrees, including paths
// that don't exist yet, and reject paths outside every worktree.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two sibling directories opened as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    for worktree in project.read_with(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>()) {
        worktree
            .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
            .await;
    }
    cx.run_until_parked();

    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Top-level file in the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        // Nested file: the relative path keeps its subdirectory component.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        // A file in the second worktree resolves to that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // A nonexistent file under a worktree still yields a project path,
        // since callers may be about to create it.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}