1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
9 DiffHunkStatusKind, assert_hunks,
10};
11use fs::FakeFs;
12use futures::{StreamExt, future};
13use git::{
14 repository::RepoPath,
15 status::{StatusCode, TrackedStatus},
16};
17use git2::RepositoryInitOptions;
18use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
19use http_client::Url;
20use language::{
21 Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
22 LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
23 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
24 tree_sitter_rust, tree_sitter_typescript,
25};
26use lsp::{
27 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
28 WillRenameFiles, notification::DidRenameFiles,
29};
30use parking_lot::Mutex;
31use paths::{config_dir, tasks_file};
32use postage::stream::Stream as _;
33use pretty_assertions::{assert_eq, assert_matches};
34use rand::{Rng as _, rngs::StdRng};
35use serde_json::json;
36#[cfg(not(windows))]
37use std::os;
38use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
39use task::{ResolvedTask, TaskContext};
40use unindent::Unindent as _;
41use util::{
42 TryFutureExt as _, assert_set_eq, maybe, path,
43 paths::PathMatcher,
44 separator,
45 test::{TempTree, marked_text_offsets},
46 uri,
47};
48use worktree::WorktreeModelHandle as _;
49
50#[gpui::test]
51async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
52 cx.executor().allow_parking();
53
54 let (tx, mut rx) = futures::channel::mpsc::unbounded();
55 let _thread = std::thread::spawn(move || {
56 #[cfg(not(target_os = "windows"))]
57 std::fs::metadata("/tmp").unwrap();
58 #[cfg(target_os = "windows")]
59 std::fs::metadata("C:/Windows").unwrap();
60 std::thread::sleep(Duration::from_millis(1000));
61 tx.unbounded_send(1).unwrap();
62 });
63 rx.next().await.unwrap();
64}
65
66#[gpui::test]
67async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
68 cx.executor().allow_parking();
69
70 let io_task = smol::unblock(move || {
71 println!("sleeping on thread {:?}", std::thread::current().id());
72 std::thread::sleep(Duration::from_millis(10));
73 1
74 });
75
76 let task = cx.foreground_executor().spawn(async move {
77 io_task.await;
78 });
79
80 task.await;
81}
82
83#[cfg(not(windows))]
84#[gpui::test]
85async fn test_symlinks(cx: &mut gpui::TestAppContext) {
86 init_test(cx);
87 cx.executor().allow_parking();
88
89 let dir = TempTree::new(json!({
90 "root": {
91 "apple": "",
92 "banana": {
93 "carrot": {
94 "date": "",
95 "endive": "",
96 }
97 },
98 "fennel": {
99 "grape": "",
100 }
101 }
102 }));
103
104 let root_link_path = dir.path().join("root_link");
105 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
106 os::unix::fs::symlink(
107 dir.path().join("root/fennel"),
108 dir.path().join("root/finnochio"),
109 )
110 .unwrap();
111
112 let project = Project::test(
113 Arc::new(RealFs::new(None, cx.executor())),
114 [root_link_path.as_ref()],
115 cx,
116 )
117 .await;
118
119 project.update(cx, |project, cx| {
120 let tree = project.worktrees(cx).next().unwrap().read(cx);
121 assert_eq!(tree.file_count(), 5);
122 assert_eq!(
123 tree.inode_for_path("fennel/grape"),
124 tree.inode_for_path("finnochio/grape")
125 );
126 });
127}
128
129#[gpui::test]
130async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
131 init_test(cx);
132
133 let dir = TempTree::new(json!({
134 ".editorconfig": r#"
135 root = true
136 [*.rs]
137 indent_style = tab
138 indent_size = 3
139 end_of_line = lf
140 insert_final_newline = true
141 trim_trailing_whitespace = true
142 [*.js]
143 tab_width = 10
144 "#,
145 ".zed": {
146 "settings.json": r#"{
147 "tab_size": 8,
148 "hard_tabs": false,
149 "ensure_final_newline_on_save": false,
150 "remove_trailing_whitespace_on_save": false,
151 "soft_wrap": "editor_width"
152 }"#,
153 },
154 "a.rs": "fn a() {\n A\n}",
155 "b": {
156 ".editorconfig": r#"
157 [*.rs]
158 indent_size = 2
159 "#,
160 "b.rs": "fn b() {\n B\n}",
161 },
162 "c.js": "def c\n C\nend",
163 "README.json": "tabs are better\n",
164 }));
165
166 let path = dir.path();
167 let fs = FakeFs::new(cx.executor());
168 fs.insert_tree_from_real_fs(path, path).await;
169 let project = Project::test(fs, [path], cx).await;
170
171 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
172 language_registry.add(js_lang());
173 language_registry.add(json_lang());
174 language_registry.add(rust_lang());
175
176 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
177
178 cx.executor().run_until_parked();
179
180 cx.update(|cx| {
181 let tree = worktree.read(cx);
182 let settings_for = |path: &str| {
183 let file_entry = tree.entry_for_path(path).unwrap().clone();
184 let file = File::for_entry(file_entry, worktree.clone());
185 let file_language = project
186 .read(cx)
187 .languages()
188 .language_for_file_path(file.path.as_ref());
189 let file_language = cx
190 .background_executor()
191 .block(file_language)
192 .expect("Failed to get file language");
193 let file = file as _;
194 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
195 };
196
197 let settings_a = settings_for("a.rs");
198 let settings_b = settings_for("b/b.rs");
199 let settings_c = settings_for("c.js");
200 let settings_readme = settings_for("README.json");
201
202 // .editorconfig overrides .zed/settings
203 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
204 assert_eq!(settings_a.hard_tabs, true);
205 assert_eq!(settings_a.ensure_final_newline_on_save, true);
206 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
207
208 // .editorconfig in b/ overrides .editorconfig in root
209 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
210
211 // "indent_size" is not set, so "tab_width" is used
212 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
213
214 // README.md should not be affected by .editorconfig's globe "*.rs"
215 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
216 });
217}
218
/// Verifies directory-scoped `.zed` configuration:
/// - `settings.json` from a nested `.zed` directory overrides the worktree-root
///   one for files beneath it;
/// - tasks from every `.zed/tasks.json` in the worktree are surfaced;
/// - after one task is scheduled and a global (file-based) tasks.json is
///   installed, the recently-used task sorts first and the global task last.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Two `.zed` directories: one at the worktree root, one nested under "b/".
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the active worktree with a default context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // Identifies tasks coming from the worktree-root ".zed" directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // "a/a.rs" resolves against the root `.zed` (tab_size 8), while
            // "b/b.rs" resolves against the nested `b/.zed` (tab_size 2).
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree `.zed/tasks.json` files contribute one task each.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as recently scheduled, then install a global
    // (user-level, file-based) tasks.json entry into the inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The recently-scheduled worktree task now sorts first; the freshly added
    // global task sorts last.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
423
424#[gpui::test]
425async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
426 init_test(cx);
427 TaskStore::init(None);
428
429 let fs = FakeFs::new(cx.executor());
430 fs.insert_tree(
431 path!("/dir"),
432 json!({
433 ".zed": {
434 "tasks.json": r#"[{
435 "label": "test worktree root",
436 "command": "echo $ZED_WORKTREE_ROOT"
437 }]"#,
438 },
439 "a": {
440 "a.rs": "fn a() {\n A\n}"
441 },
442 }),
443 )
444 .await;
445
446 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
447 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
448
449 cx.executor().run_until_parked();
450 let worktree_id = cx.update(|cx| {
451 project.update(cx, |project, cx| {
452 project.worktrees(cx).next().unwrap().read(cx).id()
453 })
454 });
455
456 let active_non_worktree_item_tasks = cx.update(|cx| {
457 get_all_tasks(
458 &project,
459 &TaskContexts {
460 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
461 active_worktree_context: None,
462 other_worktree_contexts: Vec::new(),
463 lsp_task_sources: HashMap::default(),
464 latest_selection: None,
465 },
466 cx,
467 )
468 });
469 assert!(
470 active_non_worktree_item_tasks.is_empty(),
471 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
472 );
473
474 let active_worktree_tasks = cx.update(|cx| {
475 get_all_tasks(
476 &project,
477 &TaskContexts {
478 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
479 active_worktree_context: Some((worktree_id, {
480 let mut worktree_context = TaskContext::default();
481 worktree_context
482 .task_variables
483 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
484 worktree_context
485 })),
486 other_worktree_contexts: Vec::new(),
487 lsp_task_sources: HashMap::default(),
488 latest_selection: None,
489 },
490 cx,
491 )
492 });
493 assert_eq!(
494 active_worktree_tasks
495 .into_iter()
496 .map(|(source_kind, task)| {
497 let resolved = task.resolved;
498 (source_kind, resolved.command)
499 })
500 .collect::<Vec<_>>(),
501 vec![(
502 TaskSourceKind::Worktree {
503 id: worktree_id,
504 directory_in_worktree: PathBuf::from(separator!(".zed")),
505 id_base: if cfg!(windows) {
506 "local worktree tasks from directory \".zed\"".into()
507 } else {
508 "local worktree tasks from directory \".zed\"".into()
509 },
510 },
511 "echo /dir".to_string(),
512 )]
513 );
514}
515
/// End-to-end lifecycle test for per-language LSP servers:
/// - servers start lazily when a buffer of their language is opened;
/// - buffers are configured from server capabilities (completion triggers);
/// - edits/saves/renames are routed only to servers matching each buffer's
///   language, and a rename that changes the extension moves the buffer
///   between servers (closing on the old, opening on the new, clearing
///   diagnostics, and resetting the document version);
/// - restarting servers reopens all their documents; dropping a buffer's
///   handle sends DidClose only to the matching server.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register two fake servers with distinct completion trigger characters so
    // we can tell which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic on the renamed buffer so we can verify it is cleared
    // when the buffer later changes language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two DidOpen notifications is not guaranteed, hence the set comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
917
918#[gpui::test]
919async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
920 init_test(cx);
921
922 let fs = FakeFs::new(cx.executor());
923 fs.insert_tree(
924 path!("/the-root"),
925 json!({
926 ".gitignore": "target\n",
927 "Cargo.lock": "",
928 "src": {
929 "a.rs": "",
930 "b.rs": "",
931 },
932 "target": {
933 "x": {
934 "out": {
935 "x.rs": ""
936 }
937 },
938 "y": {
939 "out": {
940 "y.rs": "",
941 }
942 },
943 "z": {
944 "out": {
945 "z.rs": ""
946 }
947 }
948 }
949 }),
950 )
951 .await;
952 fs.insert_tree(
953 path!("/the-registry"),
954 json!({
955 "dep1": {
956 "src": {
957 "dep1.rs": "",
958 }
959 },
960 "dep2": {
961 "src": {
962 "dep2.rs": "",
963 }
964 },
965 }),
966 )
967 .await;
968 fs.insert_tree(
969 path!("/the/stdlib"),
970 json!({
971 "LICENSE": "",
972 "src": {
973 "string.rs": "",
974 }
975 }),
976 )
977 .await;
978
979 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
980 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
981 (project.languages().clone(), project.lsp_store())
982 });
983 language_registry.add(rust_lang());
984 let mut fake_servers = language_registry.register_fake_lsp(
985 "Rust",
986 FakeLspAdapter {
987 name: "the-language-server",
988 ..Default::default()
989 },
990 );
991
992 cx.executor().run_until_parked();
993
994 // Start the language server by opening a buffer with a compatible file extension.
995 project
996 .update(cx, |project, cx| {
997 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
998 })
999 .await
1000 .unwrap();
1001
1002 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1003 project.update(cx, |project, cx| {
1004 let worktree = project.worktrees(cx).next().unwrap();
1005 assert_eq!(
1006 worktree
1007 .read(cx)
1008 .snapshot()
1009 .entries(true, 0)
1010 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1011 .collect::<Vec<_>>(),
1012 &[
1013 (Path::new(""), false),
1014 (Path::new(".gitignore"), false),
1015 (Path::new("Cargo.lock"), false),
1016 (Path::new("src"), false),
1017 (Path::new("src/a.rs"), false),
1018 (Path::new("src/b.rs"), false),
1019 (Path::new("target"), true),
1020 ]
1021 );
1022 });
1023
1024 let prev_read_dir_count = fs.read_dir_call_count();
1025
1026 let fake_server = fake_servers.next().await.unwrap();
1027 let (server_id, server_name) = lsp_store.read_with(cx, |lsp_store, _| {
1028 let (id, status) = lsp_store.language_server_statuses().next().unwrap();
1029 (id, LanguageServerName::from(status.name.as_str()))
1030 });
1031
1032 // Simulate jumping to a definition in a dependency outside of the worktree.
1033 let _out_of_worktree_buffer = project
1034 .update(cx, |project, cx| {
1035 project.open_local_buffer_via_lsp(
1036 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1037 server_id,
1038 server_name.clone(),
1039 cx,
1040 )
1041 })
1042 .await
1043 .unwrap();
1044
1045 // Keep track of the FS events reported to the language server.
1046 let file_changes = Arc::new(Mutex::new(Vec::new()));
1047 fake_server
1048 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1049 registrations: vec![lsp::Registration {
1050 id: Default::default(),
1051 method: "workspace/didChangeWatchedFiles".to_string(),
1052 register_options: serde_json::to_value(
1053 lsp::DidChangeWatchedFilesRegistrationOptions {
1054 watchers: vec![
1055 lsp::FileSystemWatcher {
1056 glob_pattern: lsp::GlobPattern::String(
1057 path!("/the-root/Cargo.toml").to_string(),
1058 ),
1059 kind: None,
1060 },
1061 lsp::FileSystemWatcher {
1062 glob_pattern: lsp::GlobPattern::String(
1063 path!("/the-root/src/*.{rs,c}").to_string(),
1064 ),
1065 kind: None,
1066 },
1067 lsp::FileSystemWatcher {
1068 glob_pattern: lsp::GlobPattern::String(
1069 path!("/the-root/target/y/**/*.rs").to_string(),
1070 ),
1071 kind: None,
1072 },
1073 lsp::FileSystemWatcher {
1074 glob_pattern: lsp::GlobPattern::String(
1075 path!("/the/stdlib/src/**/*.rs").to_string(),
1076 ),
1077 kind: None,
1078 },
1079 lsp::FileSystemWatcher {
1080 glob_pattern: lsp::GlobPattern::String(
1081 path!("**/Cargo.lock").to_string(),
1082 ),
1083 kind: None,
1084 },
1085 ],
1086 },
1087 )
1088 .ok(),
1089 }],
1090 })
1091 .await
1092 .unwrap();
1093 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1094 let file_changes = file_changes.clone();
1095 move |params, _| {
1096 let mut file_changes = file_changes.lock();
1097 file_changes.extend(params.changes);
1098 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1099 }
1100 });
1101
1102 cx.executor().run_until_parked();
1103 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1104 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1105
1106 let mut new_watched_paths = fs.watched_paths();
1107 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1108 assert_eq!(
1109 &new_watched_paths,
1110 &[
1111 Path::new(path!("/the-root")),
1112 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1113 Path::new(path!("/the/stdlib/src"))
1114 ]
1115 );
1116
1117 // Now the language server has asked us to watch an ignored directory path,
1118 // so we recursively load it.
1119 project.update(cx, |project, cx| {
1120 let worktree = project.visible_worktrees(cx).next().unwrap();
1121 assert_eq!(
1122 worktree
1123 .read(cx)
1124 .snapshot()
1125 .entries(true, 0)
1126 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1127 .collect::<Vec<_>>(),
1128 &[
1129 (Path::new(""), false),
1130 (Path::new(".gitignore"), false),
1131 (Path::new("Cargo.lock"), false),
1132 (Path::new("src"), false),
1133 (Path::new("src/a.rs"), false),
1134 (Path::new("src/b.rs"), false),
1135 (Path::new("target"), true),
1136 (Path::new("target/x"), true),
1137 (Path::new("target/y"), true),
1138 (Path::new("target/y/out"), true),
1139 (Path::new("target/y/out/y.rs"), true),
1140 (Path::new("target/z"), true),
1141 ]
1142 );
1143 });
1144
1145 // Perform some file system mutations, two of which match the watched patterns,
1146 // and one of which does not.
1147 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1148 .await
1149 .unwrap();
1150 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1151 .await
1152 .unwrap();
1153 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1154 .await
1155 .unwrap();
1156 fs.create_file(
1157 path!("/the-root/target/x/out/x2.rs").as_ref(),
1158 Default::default(),
1159 )
1160 .await
1161 .unwrap();
1162 fs.create_file(
1163 path!("/the-root/target/y/out/y2.rs").as_ref(),
1164 Default::default(),
1165 )
1166 .await
1167 .unwrap();
1168 fs.save(
1169 path!("/the-root/Cargo.lock").as_ref(),
1170 &"".into(),
1171 Default::default(),
1172 )
1173 .await
1174 .unwrap();
1175 fs.save(
1176 path!("/the-stdlib/LICENSE").as_ref(),
1177 &"".into(),
1178 Default::default(),
1179 )
1180 .await
1181 .unwrap();
1182 fs.save(
1183 path!("/the/stdlib/src/string.rs").as_ref(),
1184 &"".into(),
1185 Default::default(),
1186 )
1187 .await
1188 .unwrap();
1189
1190 // The language server receives events for the FS mutations that match its watch patterns.
1191 cx.executor().run_until_parked();
1192 assert_eq!(
1193 &*file_changes.lock(),
1194 &[
1195 lsp::FileEvent {
1196 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1197 typ: lsp::FileChangeType::CHANGED,
1198 },
1199 lsp::FileEvent {
1200 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1201 typ: lsp::FileChangeType::DELETED,
1202 },
1203 lsp::FileEvent {
1204 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1205 typ: lsp::FileChangeType::CREATED,
1206 },
1207 lsp::FileEvent {
1208 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1209 typ: lsp::FileChangeType::CREATED,
1210 },
1211 lsp::FileEvent {
1212 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1213 typ: lsp::FileChangeType::CHANGED,
1214 },
1215 ]
1216 );
1217}
1218
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published for files in *single-file* worktrees
    // (a worktree rooted at a file rather than a directory) are routed to the
    // correct buffer and rendered with the right severity highlights.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open the project with two single-file worktrees, one per file.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one diagnostic per file from the same fake server id: an ERROR
    // on `a` in a.rs and a WARNING on `b` in b.rs.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer's highlighted chunks should reflect only its own diagnostic.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1320
1321#[gpui::test]
1322async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1323 init_test(cx);
1324
1325 let fs = FakeFs::new(cx.executor());
1326 fs.insert_tree(
1327 path!("/root"),
1328 json!({
1329 "dir": {
1330 ".git": {
1331 "HEAD": "ref: refs/heads/main",
1332 },
1333 ".gitignore": "b.rs",
1334 "a.rs": "let a = 1;",
1335 "b.rs": "let b = 2;",
1336 },
1337 "other.rs": "let b = c;"
1338 }),
1339 )
1340 .await;
1341
1342 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1343 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1344 let (worktree, _) = project
1345 .update(cx, |project, cx| {
1346 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1347 })
1348 .await
1349 .unwrap();
1350 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1351
1352 let (worktree, _) = project
1353 .update(cx, |project, cx| {
1354 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1355 })
1356 .await
1357 .unwrap();
1358 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1359
1360 let server_id = LanguageServerId(0);
1361 lsp_store.update(cx, |lsp_store, cx| {
1362 lsp_store
1363 .update_diagnostics(
1364 server_id,
1365 lsp::PublishDiagnosticsParams {
1366 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1367 version: None,
1368 diagnostics: vec![lsp::Diagnostic {
1369 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1370 severity: Some(lsp::DiagnosticSeverity::ERROR),
1371 message: "unused variable 'b'".to_string(),
1372 ..Default::default()
1373 }],
1374 },
1375 &[],
1376 cx,
1377 )
1378 .unwrap();
1379 lsp_store
1380 .update_diagnostics(
1381 server_id,
1382 lsp::PublishDiagnosticsParams {
1383 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1384 version: None,
1385 diagnostics: vec![lsp::Diagnostic {
1386 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1387 severity: Some(lsp::DiagnosticSeverity::ERROR),
1388 message: "unknown variable 'c'".to_string(),
1389 ..Default::default()
1390 }],
1391 },
1392 &[],
1393 cx,
1394 )
1395 .unwrap();
1396 });
1397
1398 let main_ignored_buffer = project
1399 .update(cx, |project, cx| {
1400 project.open_buffer((main_worktree_id, "b.rs"), cx)
1401 })
1402 .await
1403 .unwrap();
1404 main_ignored_buffer.update(cx, |buffer, _| {
1405 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1406 assert_eq!(
1407 chunks
1408 .iter()
1409 .map(|(s, d)| (s.as_str(), *d))
1410 .collect::<Vec<_>>(),
1411 &[
1412 ("let ", None),
1413 ("b", Some(DiagnosticSeverity::ERROR)),
1414 (" = 2;", None),
1415 ],
1416 "Gigitnored buffers should still get in-buffer diagnostics",
1417 );
1418 });
1419 let other_buffer = project
1420 .update(cx, |project, cx| {
1421 project.open_buffer((other_worktree_id, ""), cx)
1422 })
1423 .await
1424 .unwrap();
1425 other_buffer.update(cx, |buffer, _| {
1426 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1427 assert_eq!(
1428 chunks
1429 .iter()
1430 .map(|(s, d)| (s.as_str(), *d))
1431 .collect::<Vec<_>>(),
1432 &[
1433 ("let b = ", None),
1434 ("c", Some(DiagnosticSeverity::ERROR)),
1435 (";", None),
1436 ],
1437 "Buffers from hidden projects should still get in-buffer diagnostics"
1438 );
1439 });
1440
1441 project.update(cx, |project, cx| {
1442 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1443 assert_eq!(
1444 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1445 vec![(
1446 ProjectPath {
1447 worktree_id: main_worktree_id,
1448 path: Arc::from(Path::new("b.rs")),
1449 },
1450 server_id,
1451 DiagnosticSummary {
1452 error_count: 1,
1453 warning_count: 0,
1454 }
1455 )]
1456 );
1457 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1458 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1459 });
1460}
1461
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies that work-done progress notifications carrying the adapter's
    // disk-based-diagnostics token are translated into the project's
    // DiskBasedDiagnosticsStarted/Finished events, and that publishing
    // identical empty diagnostics twice emits only one DiagnosticsUpdated.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register a fake server whose disk-based diagnostics are signalled via
    // progress notifications prefixed with `progress_token`.
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token fires the Started event
    // (after the RefreshInlayHints event emitted on server startup).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Diagnostics published mid-progress emit DiagnosticsUpdated for the path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the token's progress fires the Finished event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The diagnostic is present in a buffer opened after it was published.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // The second identical empty publish is a no-op: no further events pending.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1597
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server while its disk-based
    // diagnostics are still in flight doesn't wedge the project: the new
    // server instance (id 1) drives the Started/Finished events, and the old
    // server's never-completed progress is abandoned cleanly.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server (id 1) is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1684
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published by a language server are cleared
    // (both in the buffer and in the project summary) when that server is
    // restarted.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is reflected in the buffer and in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1765
1766#[gpui::test]
1767async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1768 init_test(cx);
1769
1770 let fs = FakeFs::new(cx.executor());
1771 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1772
1773 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1774 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1775
1776 language_registry.add(rust_lang());
1777 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1778
1779 let (buffer, _handle) = project
1780 .update(cx, |project, cx| {
1781 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1782 })
1783 .await
1784 .unwrap();
1785
1786 // Before restarting the server, report diagnostics with an unknown buffer version.
1787 let fake_server = fake_servers.next().await.unwrap();
1788 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1789 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1790 version: Some(10000),
1791 diagnostics: Vec::new(),
1792 });
1793 cx.executor().run_until_parked();
1794 project.update(cx, |project, cx| {
1795 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1796 });
1797
1798 let mut fake_server = fake_servers.next().await.unwrap();
1799 let notification = fake_server
1800 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1801 .await
1802 .text_document;
1803 assert_eq!(notification.version, 0);
1804}
1805
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Verifies that cancelling language-server work for a buffer sends a
    // `window/workDoneProgress/cancel` notification only for work the server
    // marked `cancellable: true` — non-cancellable work is left alone.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    // Two in-flight progress tokens: one non-cancellable, one cancellable.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token should be cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1870
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Verifies that flipping `enable_language_server` in per-language settings
    // stops and restarts the corresponding server without affecting servers
    // for other languages.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // One fake server per language so each can be observed independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts its language's server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server receives Exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A second Rust server instance re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1988
1989#[gpui::test(iterations = 3)]
1990async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1991 init_test(cx);
1992
1993 let text = "
1994 fn a() { A }
1995 fn b() { BB }
1996 fn c() { CCC }
1997 "
1998 .unindent();
1999
2000 let fs = FakeFs::new(cx.executor());
2001 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2002
2003 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2004 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2005
2006 language_registry.add(rust_lang());
2007 let mut fake_servers = language_registry.register_fake_lsp(
2008 "Rust",
2009 FakeLspAdapter {
2010 disk_based_diagnostics_sources: vec!["disk".into()],
2011 ..Default::default()
2012 },
2013 );
2014
2015 let buffer = project
2016 .update(cx, |project, cx| {
2017 project.open_local_buffer(path!("/dir/a.rs"), cx)
2018 })
2019 .await
2020 .unwrap();
2021
2022 let _handle = project.update(cx, |project, cx| {
2023 project.register_buffer_with_language_servers(&buffer, cx)
2024 });
2025
2026 let mut fake_server = fake_servers.next().await.unwrap();
2027 let open_notification = fake_server
2028 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2029 .await;
2030
2031 // Edit the buffer, moving the content down
2032 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2033 let change_notification_1 = fake_server
2034 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2035 .await;
2036 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2037
2038 // Report some diagnostics for the initial version of the buffer
2039 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2040 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2041 version: Some(open_notification.text_document.version),
2042 diagnostics: vec![
2043 lsp::Diagnostic {
2044 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2045 severity: Some(DiagnosticSeverity::ERROR),
2046 message: "undefined variable 'A'".to_string(),
2047 source: Some("disk".to_string()),
2048 ..Default::default()
2049 },
2050 lsp::Diagnostic {
2051 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2052 severity: Some(DiagnosticSeverity::ERROR),
2053 message: "undefined variable 'BB'".to_string(),
2054 source: Some("disk".to_string()),
2055 ..Default::default()
2056 },
2057 lsp::Diagnostic {
2058 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2059 severity: Some(DiagnosticSeverity::ERROR),
2060 source: Some("disk".to_string()),
2061 message: "undefined variable 'CCC'".to_string(),
2062 ..Default::default()
2063 },
2064 ],
2065 });
2066
2067 // The diagnostics have moved down since they were created.
2068 cx.executor().run_until_parked();
2069 buffer.update(cx, |buffer, _| {
2070 assert_eq!(
2071 buffer
2072 .snapshot()
2073 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2074 .collect::<Vec<_>>(),
2075 &[
2076 DiagnosticEntry {
2077 range: Point::new(3, 9)..Point::new(3, 11),
2078 diagnostic: Diagnostic {
2079 source: Some("disk".into()),
2080 severity: DiagnosticSeverity::ERROR,
2081 message: "undefined variable 'BB'".to_string(),
2082 is_disk_based: true,
2083 group_id: 1,
2084 is_primary: true,
2085 ..Default::default()
2086 },
2087 },
2088 DiagnosticEntry {
2089 range: Point::new(4, 9)..Point::new(4, 12),
2090 diagnostic: Diagnostic {
2091 source: Some("disk".into()),
2092 severity: DiagnosticSeverity::ERROR,
2093 message: "undefined variable 'CCC'".to_string(),
2094 is_disk_based: true,
2095 group_id: 2,
2096 is_primary: true,
2097 ..Default::default()
2098 }
2099 }
2100 ]
2101 );
2102 assert_eq!(
2103 chunks_with_diagnostics(buffer, 0..buffer.len()),
2104 [
2105 ("\n\nfn a() { ".to_string(), None),
2106 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2107 (" }\nfn b() { ".to_string(), None),
2108 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2109 (" }\nfn c() { ".to_string(), None),
2110 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2111 (" }\n".to_string(), None),
2112 ]
2113 );
2114 assert_eq!(
2115 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2116 [
2117 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2118 (" }\nfn c() { ".to_string(), None),
2119 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2120 ]
2121 );
2122 });
2123
2124 // Ensure overlapping diagnostics are highlighted correctly.
2125 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2126 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2127 version: Some(open_notification.text_document.version),
2128 diagnostics: vec![
2129 lsp::Diagnostic {
2130 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2131 severity: Some(DiagnosticSeverity::ERROR),
2132 message: "undefined variable 'A'".to_string(),
2133 source: Some("disk".to_string()),
2134 ..Default::default()
2135 },
2136 lsp::Diagnostic {
2137 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2138 severity: Some(DiagnosticSeverity::WARNING),
2139 message: "unreachable statement".to_string(),
2140 source: Some("disk".to_string()),
2141 ..Default::default()
2142 },
2143 ],
2144 });
2145
2146 cx.executor().run_until_parked();
2147 buffer.update(cx, |buffer, _| {
2148 assert_eq!(
2149 buffer
2150 .snapshot()
2151 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2152 .collect::<Vec<_>>(),
2153 &[
2154 DiagnosticEntry {
2155 range: Point::new(2, 9)..Point::new(2, 12),
2156 diagnostic: Diagnostic {
2157 source: Some("disk".into()),
2158 severity: DiagnosticSeverity::WARNING,
2159 message: "unreachable statement".to_string(),
2160 is_disk_based: true,
2161 group_id: 4,
2162 is_primary: true,
2163 ..Default::default()
2164 }
2165 },
2166 DiagnosticEntry {
2167 range: Point::new(2, 9)..Point::new(2, 10),
2168 diagnostic: Diagnostic {
2169 source: Some("disk".into()),
2170 severity: DiagnosticSeverity::ERROR,
2171 message: "undefined variable 'A'".to_string(),
2172 is_disk_based: true,
2173 group_id: 3,
2174 is_primary: true,
2175 ..Default::default()
2176 },
2177 }
2178 ]
2179 );
2180 assert_eq!(
2181 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2182 [
2183 ("fn a() { ".to_string(), None),
2184 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2185 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2186 ("\n".to_string(), None),
2187 ]
2188 );
2189 assert_eq!(
2190 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2191 [
2192 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2193 ("\n".to_string(), None),
2194 ]
2195 );
2196 });
2197
2198 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2199 // changes since the last save.
2200 buffer.update(cx, |buffer, cx| {
2201 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2202 buffer.edit(
2203 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2204 None,
2205 cx,
2206 );
2207 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2208 });
2209 let change_notification_2 = fake_server
2210 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2211 .await;
2212 assert!(
2213 change_notification_2.text_document.version > change_notification_1.text_document.version
2214 );
2215
2216 // Handle out-of-order diagnostics
2217 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2218 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2219 version: Some(change_notification_2.text_document.version),
2220 diagnostics: vec![
2221 lsp::Diagnostic {
2222 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2223 severity: Some(DiagnosticSeverity::ERROR),
2224 message: "undefined variable 'BB'".to_string(),
2225 source: Some("disk".to_string()),
2226 ..Default::default()
2227 },
2228 lsp::Diagnostic {
2229 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2230 severity: Some(DiagnosticSeverity::WARNING),
2231 message: "undefined variable 'A'".to_string(),
2232 source: Some("disk".to_string()),
2233 ..Default::default()
2234 },
2235 ],
2236 });
2237
2238 cx.executor().run_until_parked();
2239 buffer.update(cx, |buffer, _| {
2240 assert_eq!(
2241 buffer
2242 .snapshot()
2243 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2244 .collect::<Vec<_>>(),
2245 &[
2246 DiagnosticEntry {
2247 range: Point::new(2, 21)..Point::new(2, 22),
2248 diagnostic: Diagnostic {
2249 source: Some("disk".into()),
2250 severity: DiagnosticSeverity::WARNING,
2251 message: "undefined variable 'A'".to_string(),
2252 is_disk_based: true,
2253 group_id: 6,
2254 is_primary: true,
2255 ..Default::default()
2256 }
2257 },
2258 DiagnosticEntry {
2259 range: Point::new(3, 9)..Point::new(3, 14),
2260 diagnostic: Diagnostic {
2261 source: Some("disk".into()),
2262 severity: DiagnosticSeverity::ERROR,
2263 message: "undefined variable 'BB'".to_string(),
2264 is_disk_based: true,
2265 group_id: 5,
2266 is_primary: true,
2267 ..Default::default()
2268 },
2269 }
2270 ]
2271 );
2272 });
2273}
2274
2275#[gpui::test]
2276async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2277 init_test(cx);
2278
2279 let text = concat!(
2280 "let one = ;\n", //
2281 "let two = \n",
2282 "let three = 3;\n",
2283 );
2284
2285 let fs = FakeFs::new(cx.executor());
2286 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2287
2288 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2289 let buffer = project
2290 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2291 .await
2292 .unwrap();
2293
2294 project.update(cx, |project, cx| {
2295 project.lsp_store.update(cx, |lsp_store, cx| {
2296 lsp_store
2297 .update_diagnostic_entries(
2298 LanguageServerId(0),
2299 PathBuf::from("/dir/a.rs"),
2300 None,
2301 vec![
2302 DiagnosticEntry {
2303 range: Unclipped(PointUtf16::new(0, 10))
2304 ..Unclipped(PointUtf16::new(0, 10)),
2305 diagnostic: Diagnostic {
2306 severity: DiagnosticSeverity::ERROR,
2307 message: "syntax error 1".to_string(),
2308 ..Default::default()
2309 },
2310 },
2311 DiagnosticEntry {
2312 range: Unclipped(PointUtf16::new(1, 10))
2313 ..Unclipped(PointUtf16::new(1, 10)),
2314 diagnostic: Diagnostic {
2315 severity: DiagnosticSeverity::ERROR,
2316 message: "syntax error 2".to_string(),
2317 ..Default::default()
2318 },
2319 },
2320 ],
2321 cx,
2322 )
2323 .unwrap();
2324 })
2325 });
2326
2327 // An empty range is extended forward to include the following character.
2328 // At the end of a line, an empty range is extended backward to include
2329 // the preceding character.
2330 buffer.update(cx, |buffer, _| {
2331 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2332 assert_eq!(
2333 chunks
2334 .iter()
2335 .map(|(s, d)| (s.as_str(), *d))
2336 .collect::<Vec<_>>(),
2337 &[
2338 ("let one = ", None),
2339 (";", Some(DiagnosticSeverity::ERROR)),
2340 ("\nlet two =", None),
2341 (" ", Some(DiagnosticSeverity::ERROR)),
2342 ("\nlet three = 3;\n", None)
2343 ]
2344 );
2345 });
2346}
2347
2348#[gpui::test]
2349async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2350 init_test(cx);
2351
2352 let fs = FakeFs::new(cx.executor());
2353 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2354 .await;
2355
2356 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2357 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2358
2359 lsp_store.update(cx, |lsp_store, cx| {
2360 lsp_store
2361 .update_diagnostic_entries(
2362 LanguageServerId(0),
2363 Path::new("/dir/a.rs").to_owned(),
2364 None,
2365 vec![DiagnosticEntry {
2366 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2367 diagnostic: Diagnostic {
2368 severity: DiagnosticSeverity::ERROR,
2369 is_primary: true,
2370 message: "syntax error a1".to_string(),
2371 ..Default::default()
2372 },
2373 }],
2374 cx,
2375 )
2376 .unwrap();
2377 lsp_store
2378 .update_diagnostic_entries(
2379 LanguageServerId(1),
2380 Path::new("/dir/a.rs").to_owned(),
2381 None,
2382 vec![DiagnosticEntry {
2383 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2384 diagnostic: Diagnostic {
2385 severity: DiagnosticSeverity::ERROR,
2386 is_primary: true,
2387 message: "syntax error b1".to_string(),
2388 ..Default::default()
2389 },
2390 }],
2391 cx,
2392 )
2393 .unwrap();
2394
2395 assert_eq!(
2396 lsp_store.diagnostic_summary(false, cx),
2397 DiagnosticSummary {
2398 error_count: 2,
2399 warning_count: 0,
2400 }
2401 );
2402 });
2403}
2404
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP edits expressed against an *older* document version
    // are transformed through the buffer edits made since that version, so
    // they land at the correct, current positions.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server saw at open time; the edits below will
    // be issued against this soon-to-be-stale version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // All ranges below are coordinates in the *original* document version.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must produce the result the server
    // intended, despite the intervening buffer edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2559
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // A logically small change expressed as a very large diff (delete +
    // reinsert most of the file) should be minimized into a few small,
    // disjoint edits.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The giant delete-and-reinsert diff collapses into just two edits:
        // the import rewrite and the removal of the now-duplicated line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2670
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    // Some servers send an insertion *after* a replacement starting at the
    // same position, which violates the LSP spec's ordering rules; both
    // edits must still be applied.
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The insertion lands before the (identity) replacement of "Path".
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
2726
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    // Malformed server edits — unordered, with an inverted range and a range
    // extending far past the end of the file — should be normalized and
    // clipped rather than rejected, yielding the same minimal edits as the
    // well-formed case.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0,4) precedes start (0,8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position is far past the last line of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same minimized result as the well-formed variant of this test.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2833
2834fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2835 buffer: &Buffer,
2836 range: Range<T>,
2837) -> Vec<(String, Option<DiagnosticSeverity>)> {
2838 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2839 for chunk in buffer.snapshot().chunks(range, true) {
2840 if chunks.last().map_or(false, |prev_chunk| {
2841 prev_chunk.1 == chunk.diagnostic_severity
2842 }) {
2843 chunks.last_mut().unwrap().0.push_str(chunk.text);
2844 } else {
2845 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2846 }
2847 }
2848 chunks
2849}
2850
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Go-to-definition targeting a file outside the project should open that
    // file in a new *invisible* worktree, which is released once the last
    // reference to the definition is dropped.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` lives outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server responds to every definition request with a location
    // in `a.rs`, after asserting the request's position and document.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // `a.rs` was added as an invisible worktree to host the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for `a.rs`.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists the project's worktrees as `(absolute_path, is_visible)` pairs.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2948
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    // When a completion item carries a `text_edit`, its range and new text
    // must take precedence over both `insert_text` and `label`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with one item whose text_edit replaces the trailing 3 chars.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion uses the text_edit's text and range verbatim.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3026
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Exercises completion items that rely on the list-level
    // `itemDefaults.editRange` instead of a per-item `text_edit`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `insert_text` is applied over the list's default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With neither text_edit nor insert_text, the label itself is
        // inserted over the default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3152
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // When the server provides neither a per-item `text_edit` nor a default
    // `editRange`, the replaced range has to be computed locally.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The replaced range covers the trailing "fqn" — presumably the token
    // around the completion position; verify against the range-computation
    // logic if this changes.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The replaced range is "cmp" — the 3 characters preceding the cursor,
    // which sits just before the closing quote.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3248
3249#[gpui::test]
3250async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
3251 init_test(cx);
3252
3253 let fs = FakeFs::new(cx.executor());
3254 fs.insert_tree(
3255 path!("/dir"),
3256 json!({
3257 "a.ts": "",
3258 }),
3259 )
3260 .await;
3261
3262 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3263
3264 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3265 language_registry.add(typescript_lang());
3266 let mut fake_language_servers = language_registry.register_fake_lsp(
3267 "TypeScript",
3268 FakeLspAdapter {
3269 capabilities: lsp::ServerCapabilities {
3270 completion_provider: Some(lsp::CompletionOptions {
3271 trigger_characters: Some(vec![":".to_string()]),
3272 ..Default::default()
3273 }),
3274 ..Default::default()
3275 },
3276 ..Default::default()
3277 },
3278 );
3279
3280 let (buffer, _handle) = project
3281 .update(cx, |p, cx| {
3282 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3283 })
3284 .await
3285 .unwrap();
3286
3287 let fake_server = fake_language_servers.next().await.unwrap();
3288
3289 let text = "let a = b.fqn";
3290 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3291 let completions = project.update(cx, |project, cx| {
3292 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3293 });
3294
3295 fake_server
3296 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3297 Ok(Some(lsp::CompletionResponse::Array(vec![
3298 lsp::CompletionItem {
3299 label: "fullyQualifiedName?".into(),
3300 insert_text: Some("fully\rQualified\r\nName".into()),
3301 ..Default::default()
3302 },
3303 ])))
3304 })
3305 .next()
3306 .await;
3307 let completions = completions.await.unwrap().unwrap();
3308 assert_eq!(completions.len(), 1);
3309 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
3310}
3311
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // End-to-end test of the command-based code action flow: the server
    // returns an action without edits, resolution attaches a command, and
    // executing that command makes the server push a `workspace/applyEdit`
    // back to the editor, which must be captured in the resulting
    // ProjectTransaction.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Advertise support for resolving code actions, plus the one command that
    // the test action will reference.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: prepend "X" at the top of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3452
3453#[gpui::test(iterations = 10)]
3454async fn test_save_file(cx: &mut gpui::TestAppContext) {
3455 init_test(cx);
3456
3457 let fs = FakeFs::new(cx.executor());
3458 fs.insert_tree(
3459 path!("/dir"),
3460 json!({
3461 "file1": "the old contents",
3462 }),
3463 )
3464 .await;
3465
3466 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3467 let buffer = project
3468 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3469 .await
3470 .unwrap();
3471 buffer.update(cx, |buffer, cx| {
3472 assert_eq!(buffer.text(), "the old contents");
3473 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3474 });
3475
3476 project
3477 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3478 .await
3479 .unwrap();
3480
3481 let new_text = fs
3482 .load(Path::new(path!("/dir/file1")))
3483 .await
3484 .unwrap()
3485 .replace("\r\n", "\n");
3486 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3487}
3488
3489#[gpui::test(iterations = 30)]
3490async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3491 init_test(cx);
3492
3493 let fs = FakeFs::new(cx.executor().clone());
3494 fs.insert_tree(
3495 path!("/dir"),
3496 json!({
3497 "file1": "the original contents",
3498 }),
3499 )
3500 .await;
3501
3502 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3503 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3504 let buffer = project
3505 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3506 .await
3507 .unwrap();
3508
3509 // Simulate buffer diffs being slow, so that they don't complete before
3510 // the next file change occurs.
3511 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3512
3513 // Change the buffer's file on disk, and then wait for the file change
3514 // to be detected by the worktree, so that the buffer starts reloading.
3515 fs.save(
3516 path!("/dir/file1").as_ref(),
3517 &"the first contents".into(),
3518 Default::default(),
3519 )
3520 .await
3521 .unwrap();
3522 worktree.next_event(cx).await;
3523
3524 // Change the buffer's file again. Depending on the random seed, the
3525 // previous file change may still be in progress.
3526 fs.save(
3527 path!("/dir/file1").as_ref(),
3528 &"the second contents".into(),
3529 Default::default(),
3530 )
3531 .await
3532 .unwrap();
3533 worktree.next_event(cx).await;
3534
3535 cx.executor().run_until_parked();
3536 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3537 buffer.read_with(cx, |buffer, _| {
3538 assert_eq!(buffer.text(), on_disk_text);
3539 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3540 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3541 });
3542}
3543
3544#[gpui::test(iterations = 30)]
3545async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3546 init_test(cx);
3547
3548 let fs = FakeFs::new(cx.executor().clone());
3549 fs.insert_tree(
3550 path!("/dir"),
3551 json!({
3552 "file1": "the original contents",
3553 }),
3554 )
3555 .await;
3556
3557 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3558 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3559 let buffer = project
3560 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3561 .await
3562 .unwrap();
3563
3564 // Simulate buffer diffs being slow, so that they don't complete before
3565 // the next file change occurs.
3566 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3567
3568 // Change the buffer's file on disk, and then wait for the file change
3569 // to be detected by the worktree, so that the buffer starts reloading.
3570 fs.save(
3571 path!("/dir/file1").as_ref(),
3572 &"the first contents".into(),
3573 Default::default(),
3574 )
3575 .await
3576 .unwrap();
3577 worktree.next_event(cx).await;
3578
3579 cx.executor()
3580 .spawn(cx.executor().simulate_random_delay())
3581 .await;
3582
3583 // Perform a noop edit, causing the buffer's version to increase.
3584 buffer.update(cx, |buffer, cx| {
3585 buffer.edit([(0..0, " ")], None, cx);
3586 buffer.undo(cx);
3587 });
3588
3589 cx.executor().run_until_parked();
3590 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3591 buffer.read_with(cx, |buffer, _| {
3592 let buffer_text = buffer.text();
3593 if buffer_text == on_disk_text {
3594 assert!(
3595 !buffer.is_dirty() && !buffer.has_conflict(),
3596 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3597 );
3598 }
3599 // If the file change occurred while the buffer was processing the first
3600 // change, the buffer will be in a conflicting state.
3601 else {
3602 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3603 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3604 }
3605 });
3606}
3607
3608#[gpui::test]
3609async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3610 init_test(cx);
3611
3612 let fs = FakeFs::new(cx.executor());
3613 fs.insert_tree(
3614 path!("/dir"),
3615 json!({
3616 "file1": "the old contents",
3617 }),
3618 )
3619 .await;
3620
3621 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3622 let buffer = project
3623 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3624 .await
3625 .unwrap();
3626 buffer.update(cx, |buffer, cx| {
3627 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3628 });
3629
3630 project
3631 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3632 .await
3633 .unwrap();
3634
3635 let new_text = fs
3636 .load(Path::new(path!("/dir/file1")))
3637 .await
3638 .unwrap()
3639 .replace("\r\n", "\n");
3640 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3641}
3642
3643#[gpui::test]
3644async fn test_save_as(cx: &mut gpui::TestAppContext) {
3645 init_test(cx);
3646
3647 let fs = FakeFs::new(cx.executor());
3648 fs.insert_tree("/dir", json!({})).await;
3649
3650 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3651
3652 let languages = project.update(cx, |project, _| project.languages().clone());
3653 languages.add(rust_lang());
3654
3655 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3656 buffer.update(cx, |buffer, cx| {
3657 buffer.edit([(0..0, "abc")], None, cx);
3658 assert!(buffer.is_dirty());
3659 assert!(!buffer.has_conflict());
3660 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3661 });
3662 project
3663 .update(cx, |project, cx| {
3664 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3665 let path = ProjectPath {
3666 worktree_id,
3667 path: Arc::from(Path::new("file1.rs")),
3668 };
3669 project.save_buffer_as(buffer.clone(), path, cx)
3670 })
3671 .await
3672 .unwrap();
3673 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3674
3675 cx.executor().run_until_parked();
3676 buffer.update(cx, |buffer, cx| {
3677 assert_eq!(
3678 buffer.file().unwrap().full_path(cx),
3679 Path::new("dir/file1.rs")
3680 );
3681 assert!(!buffer.is_dirty());
3682 assert!(!buffer.has_conflict());
3683 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3684 });
3685
3686 let opened_buffer = project
3687 .update(cx, |project, cx| {
3688 project.open_local_buffer("/dir/file1.rs", cx)
3689 })
3690 .await
3691 .unwrap();
3692 assert_eq!(opened_buffer, buffer);
3693}
3694
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Renames and deletions on a real filesystem must preserve entry ids,
    // update buffers' file paths, and replicate correctly to a remote copy
    // of the worktree built from observed update messages.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update message the local worktree emits, so they can be
    // replayed against the remote copy later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including renames of ancestor directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths; a deleted file's
        // buffer keeps its last known path.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3860
3861#[gpui::test(iterations = 10)]
3862async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3863 init_test(cx);
3864
3865 let fs = FakeFs::new(cx.executor());
3866 fs.insert_tree(
3867 path!("/dir"),
3868 json!({
3869 "a": {
3870 "file1": "",
3871 }
3872 }),
3873 )
3874 .await;
3875
3876 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3877 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3878 let tree_id = tree.update(cx, |tree, _| tree.id());
3879
3880 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3881 project.update(cx, |project, cx| {
3882 let tree = project.worktrees(cx).next().unwrap();
3883 tree.read(cx)
3884 .entry_for_path(path)
3885 .unwrap_or_else(|| panic!("no entry for path {}", path))
3886 .id
3887 })
3888 };
3889
3890 let dir_id = id_for_path("a", cx);
3891 let file_id = id_for_path("a/file1", cx);
3892 let buffer = project
3893 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3894 .await
3895 .unwrap();
3896 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3897
3898 project
3899 .update(cx, |project, cx| {
3900 project.rename_entry(dir_id, Path::new("b"), cx)
3901 })
3902 .unwrap()
3903 .await
3904 .to_included()
3905 .unwrap();
3906 cx.executor().run_until_parked();
3907
3908 assert_eq!(id_for_path("b", cx), dir_id);
3909 assert_eq!(id_for_path("b/file1", cx), file_id);
3910 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3911}
3912
3913#[gpui::test]
3914async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3915 init_test(cx);
3916
3917 let fs = FakeFs::new(cx.executor());
3918 fs.insert_tree(
3919 "/dir",
3920 json!({
3921 "a.txt": "a-contents",
3922 "b.txt": "b-contents",
3923 }),
3924 )
3925 .await;
3926
3927 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3928
3929 // Spawn multiple tasks to open paths, repeating some paths.
3930 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3931 (
3932 p.open_local_buffer("/dir/a.txt", cx),
3933 p.open_local_buffer("/dir/b.txt", cx),
3934 p.open_local_buffer("/dir/a.txt", cx),
3935 )
3936 });
3937
3938 let buffer_a_1 = buffer_a_1.await.unwrap();
3939 let buffer_a_2 = buffer_a_2.await.unwrap();
3940 let buffer_b = buffer_b.await.unwrap();
3941 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3942 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3943
3944 // There is only one buffer per path.
3945 let buffer_a_id = buffer_a_1.entity_id();
3946 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3947
3948 // Open the same path again while it is still open.
3949 drop(buffer_a_1);
3950 let buffer_a_3 = project
3951 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3952 .await
3953 .unwrap();
3954
3955 // There's still only one buffer per path.
3956 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3957}
3958
3959#[gpui::test]
3960async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3961 init_test(cx);
3962
3963 let fs = FakeFs::new(cx.executor());
3964 fs.insert_tree(
3965 path!("/dir"),
3966 json!({
3967 "file1": "abc",
3968 "file2": "def",
3969 "file3": "ghi",
3970 }),
3971 )
3972 .await;
3973
3974 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3975
3976 let buffer1 = project
3977 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3978 .await
3979 .unwrap();
3980 let events = Arc::new(Mutex::new(Vec::new()));
3981
3982 // initially, the buffer isn't dirty.
3983 buffer1.update(cx, |buffer, cx| {
3984 cx.subscribe(&buffer1, {
3985 let events = events.clone();
3986 move |_, _, event, _| match event {
3987 BufferEvent::Operation { .. } => {}
3988 _ => events.lock().push(event.clone()),
3989 }
3990 })
3991 .detach();
3992
3993 assert!(!buffer.is_dirty());
3994 assert!(events.lock().is_empty());
3995
3996 buffer.edit([(1..2, "")], None, cx);
3997 });
3998
3999 // after the first edit, the buffer is dirty, and emits a dirtied event.
4000 buffer1.update(cx, |buffer, cx| {
4001 assert!(buffer.text() == "ac");
4002 assert!(buffer.is_dirty());
4003 assert_eq!(
4004 *events.lock(),
4005 &[
4006 language::BufferEvent::Edited,
4007 language::BufferEvent::DirtyChanged
4008 ]
4009 );
4010 events.lock().clear();
4011 buffer.did_save(
4012 buffer.version(),
4013 buffer.file().unwrap().disk_state().mtime(),
4014 cx,
4015 );
4016 });
4017
4018 // after saving, the buffer is not dirty, and emits a saved event.
4019 buffer1.update(cx, |buffer, cx| {
4020 assert!(!buffer.is_dirty());
4021 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4022 events.lock().clear();
4023
4024 buffer.edit([(1..1, "B")], None, cx);
4025 buffer.edit([(2..2, "D")], None, cx);
4026 });
4027
4028 // after editing again, the buffer is dirty, and emits another dirty event.
4029 buffer1.update(cx, |buffer, cx| {
4030 assert!(buffer.text() == "aBDc");
4031 assert!(buffer.is_dirty());
4032 assert_eq!(
4033 *events.lock(),
4034 &[
4035 language::BufferEvent::Edited,
4036 language::BufferEvent::DirtyChanged,
4037 language::BufferEvent::Edited,
4038 ],
4039 );
4040 events.lock().clear();
4041
4042 // After restoring the buffer to its previously-saved state,
4043 // the buffer is not considered dirty anymore.
4044 buffer.edit([(1..3, "")], None, cx);
4045 assert!(buffer.text() == "ac");
4046 assert!(!buffer.is_dirty());
4047 });
4048
4049 assert_eq!(
4050 *events.lock(),
4051 &[
4052 language::BufferEvent::Edited,
4053 language::BufferEvent::DirtyChanged
4054 ]
4055 );
4056
4057 // When a file is deleted, it is not considered dirty.
4058 let events = Arc::new(Mutex::new(Vec::new()));
4059 let buffer2 = project
4060 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4061 .await
4062 .unwrap();
4063 buffer2.update(cx, |_, cx| {
4064 cx.subscribe(&buffer2, {
4065 let events = events.clone();
4066 move |_, _, event, _| match event {
4067 BufferEvent::Operation { .. } => {}
4068 _ => events.lock().push(event.clone()),
4069 }
4070 })
4071 .detach();
4072 });
4073
4074 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4075 .await
4076 .unwrap();
4077 cx.executor().run_until_parked();
4078 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4079 assert_eq!(
4080 mem::take(&mut *events.lock()),
4081 &[language::BufferEvent::FileHandleChanged]
4082 );
4083
4084 // Buffer becomes dirty when edited.
4085 buffer2.update(cx, |buffer, cx| {
4086 buffer.edit([(2..3, "")], None, cx);
4087 assert_eq!(buffer.is_dirty(), true);
4088 });
4089 assert_eq!(
4090 mem::take(&mut *events.lock()),
4091 &[
4092 language::BufferEvent::Edited,
4093 language::BufferEvent::DirtyChanged
4094 ]
4095 );
4096
4097 // Buffer becomes clean again when all of its content is removed, because
4098 // the file was deleted.
4099 buffer2.update(cx, |buffer, cx| {
4100 buffer.edit([(0..2, "")], None, cx);
4101 assert_eq!(buffer.is_empty(), true);
4102 assert_eq!(buffer.is_dirty(), false);
4103 });
4104 assert_eq!(
4105 *events.lock(),
4106 &[
4107 language::BufferEvent::Edited,
4108 language::BufferEvent::DirtyChanged
4109 ]
4110 );
4111
4112 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4113 let events = Arc::new(Mutex::new(Vec::new()));
4114 let buffer3 = project
4115 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4116 .await
4117 .unwrap();
4118 buffer3.update(cx, |_, cx| {
4119 cx.subscribe(&buffer3, {
4120 let events = events.clone();
4121 move |_, _, event, _| match event {
4122 BufferEvent::Operation { .. } => {}
4123 _ => events.lock().push(event.clone()),
4124 }
4125 })
4126 .detach();
4127 });
4128
4129 buffer3.update(cx, |buffer, cx| {
4130 buffer.edit([(0..0, "x")], None, cx);
4131 });
4132 events.lock().clear();
4133 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4134 .await
4135 .unwrap();
4136 cx.executor().run_until_parked();
4137 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4138 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4139}
4140
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // When a clean buffer's file changes on disk, the buffer is reloaded by
    // diffing old vs. new contents, so anchors must land at the analogous
    // positions in the new text. When a dirty buffer's file changes, the
    // buffer keeps its text and is flagged as conflicted instead.
    init_test(cx);

    // The `ˇ` markers define the offsets at which anchors will be created.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // The `ˇ` markers here give the offsets where the anchors are expected to
    // resolve after the reload.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4223
4224#[gpui::test]
4225async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4226 init_test(cx);
4227
4228 let fs = FakeFs::new(cx.executor());
4229 fs.insert_tree(
4230 path!("/dir"),
4231 json!({
4232 "file1": "a\nb\nc\n",
4233 "file2": "one\r\ntwo\r\nthree\r\n",
4234 }),
4235 )
4236 .await;
4237
4238 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4239 let buffer1 = project
4240 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4241 .await
4242 .unwrap();
4243 let buffer2 = project
4244 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4245 .await
4246 .unwrap();
4247
4248 buffer1.update(cx, |buffer, _| {
4249 assert_eq!(buffer.text(), "a\nb\nc\n");
4250 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4251 });
4252 buffer2.update(cx, |buffer, _| {
4253 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4254 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4255 });
4256
4257 // Change a file's line endings on disk from unix to windows. The buffer's
4258 // state updates correctly.
4259 fs.save(
4260 path!("/dir/file1").as_ref(),
4261 &"aaa\nb\nc\n".into(),
4262 LineEnding::Windows,
4263 )
4264 .await
4265 .unwrap();
4266 cx.executor().run_until_parked();
4267 buffer1.update(cx, |buffer, _| {
4268 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4269 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4270 });
4271
4272 // Save a file with windows line endings. The file is written correctly.
4273 buffer2.update(cx, |buffer, cx| {
4274 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4275 });
4276 project
4277 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4278 .await
4279 .unwrap();
4280 assert_eq!(
4281 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4282 "one\r\ntwo\r\nthree\r\nfour\r\n",
4283 );
4284}
4285
4286#[gpui::test]
4287async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4288 init_test(cx);
4289
4290 let fs = FakeFs::new(cx.executor());
4291 fs.insert_tree(
4292 path!("/dir"),
4293 json!({
4294 "a.rs": "
4295 fn foo(mut v: Vec<usize>) {
4296 for x in &v {
4297 v.push(1);
4298 }
4299 }
4300 "
4301 .unindent(),
4302 }),
4303 )
4304 .await;
4305
4306 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4307 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
4308 let buffer = project
4309 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
4310 .await
4311 .unwrap();
4312
4313 let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
4314 let message = lsp::PublishDiagnosticsParams {
4315 uri: buffer_uri.clone(),
4316 diagnostics: vec![
4317 lsp::Diagnostic {
4318 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4319 severity: Some(DiagnosticSeverity::WARNING),
4320 message: "error 1".to_string(),
4321 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4322 location: lsp::Location {
4323 uri: buffer_uri.clone(),
4324 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4325 },
4326 message: "error 1 hint 1".to_string(),
4327 }]),
4328 ..Default::default()
4329 },
4330 lsp::Diagnostic {
4331 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4332 severity: Some(DiagnosticSeverity::HINT),
4333 message: "error 1 hint 1".to_string(),
4334 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4335 location: lsp::Location {
4336 uri: buffer_uri.clone(),
4337 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4338 },
4339 message: "original diagnostic".to_string(),
4340 }]),
4341 ..Default::default()
4342 },
4343 lsp::Diagnostic {
4344 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4345 severity: Some(DiagnosticSeverity::ERROR),
4346 message: "error 2".to_string(),
4347 related_information: Some(vec![
4348 lsp::DiagnosticRelatedInformation {
4349 location: lsp::Location {
4350 uri: buffer_uri.clone(),
4351 range: lsp::Range::new(
4352 lsp::Position::new(1, 13),
4353 lsp::Position::new(1, 15),
4354 ),
4355 },
4356 message: "error 2 hint 1".to_string(),
4357 },
4358 lsp::DiagnosticRelatedInformation {
4359 location: lsp::Location {
4360 uri: buffer_uri.clone(),
4361 range: lsp::Range::new(
4362 lsp::Position::new(1, 13),
4363 lsp::Position::new(1, 15),
4364 ),
4365 },
4366 message: "error 2 hint 2".to_string(),
4367 },
4368 ]),
4369 ..Default::default()
4370 },
4371 lsp::Diagnostic {
4372 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4373 severity: Some(DiagnosticSeverity::HINT),
4374 message: "error 2 hint 1".to_string(),
4375 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4376 location: lsp::Location {
4377 uri: buffer_uri.clone(),
4378 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4379 },
4380 message: "original diagnostic".to_string(),
4381 }]),
4382 ..Default::default()
4383 },
4384 lsp::Diagnostic {
4385 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4386 severity: Some(DiagnosticSeverity::HINT),
4387 message: "error 2 hint 2".to_string(),
4388 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4389 location: lsp::Location {
4390 uri: buffer_uri,
4391 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4392 },
4393 message: "original diagnostic".to_string(),
4394 }]),
4395 ..Default::default()
4396 },
4397 ],
4398 version: None,
4399 };
4400
4401 lsp_store
4402 .update(cx, |lsp_store, cx| {
4403 lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
4404 })
4405 .unwrap();
4406 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
4407
4408 assert_eq!(
4409 buffer
4410 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
4411 .collect::<Vec<_>>(),
4412 &[
4413 DiagnosticEntry {
4414 range: Point::new(1, 8)..Point::new(1, 9),
4415 diagnostic: Diagnostic {
4416 severity: DiagnosticSeverity::WARNING,
4417 message: "error 1".to_string(),
4418 group_id: 1,
4419 is_primary: true,
4420 ..Default::default()
4421 }
4422 },
4423 DiagnosticEntry {
4424 range: Point::new(1, 8)..Point::new(1, 9),
4425 diagnostic: Diagnostic {
4426 severity: DiagnosticSeverity::HINT,
4427 message: "error 1 hint 1".to_string(),
4428 group_id: 1,
4429 is_primary: false,
4430 ..Default::default()
4431 }
4432 },
4433 DiagnosticEntry {
4434 range: Point::new(1, 13)..Point::new(1, 15),
4435 diagnostic: Diagnostic {
4436 severity: DiagnosticSeverity::HINT,
4437 message: "error 2 hint 1".to_string(),
4438 group_id: 0,
4439 is_primary: false,
4440 ..Default::default()
4441 }
4442 },
4443 DiagnosticEntry {
4444 range: Point::new(1, 13)..Point::new(1, 15),
4445 diagnostic: Diagnostic {
4446 severity: DiagnosticSeverity::HINT,
4447 message: "error 2 hint 2".to_string(),
4448 group_id: 0,
4449 is_primary: false,
4450 ..Default::default()
4451 }
4452 },
4453 DiagnosticEntry {
4454 range: Point::new(2, 8)..Point::new(2, 17),
4455 diagnostic: Diagnostic {
4456 severity: DiagnosticSeverity::ERROR,
4457 message: "error 2".to_string(),
4458 group_id: 0,
4459 is_primary: true,
4460 ..Default::default()
4461 }
4462 }
4463 ]
4464 );
4465
4466 assert_eq!(
4467 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4468 &[
4469 DiagnosticEntry {
4470 range: Point::new(1, 13)..Point::new(1, 15),
4471 diagnostic: Diagnostic {
4472 severity: DiagnosticSeverity::HINT,
4473 message: "error 2 hint 1".to_string(),
4474 group_id: 0,
4475 is_primary: false,
4476 ..Default::default()
4477 }
4478 },
4479 DiagnosticEntry {
4480 range: Point::new(1, 13)..Point::new(1, 15),
4481 diagnostic: Diagnostic {
4482 severity: DiagnosticSeverity::HINT,
4483 message: "error 2 hint 2".to_string(),
4484 group_id: 0,
4485 is_primary: false,
4486 ..Default::default()
4487 }
4488 },
4489 DiagnosticEntry {
4490 range: Point::new(2, 8)..Point::new(2, 17),
4491 diagnostic: Diagnostic {
4492 severity: DiagnosticSeverity::ERROR,
4493 message: "error 2".to_string(),
4494 group_id: 0,
4495 is_primary: true,
4496 ..Default::default()
4497 }
4498 }
4499 ]
4500 );
4501
4502 assert_eq!(
4503 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4504 &[
4505 DiagnosticEntry {
4506 range: Point::new(1, 8)..Point::new(1, 9),
4507 diagnostic: Diagnostic {
4508 severity: DiagnosticSeverity::WARNING,
4509 message: "error 1".to_string(),
4510 group_id: 1,
4511 is_primary: true,
4512 ..Default::default()
4513 }
4514 },
4515 DiagnosticEntry {
4516 range: Point::new(1, 8)..Point::new(1, 9),
4517 diagnostic: Diagnostic {
4518 severity: DiagnosticSeverity::HINT,
4519 message: "error 1 hint 1".to_string(),
4520 group_id: 1,
4521 is_primary: false,
4522 ..Default::default()
4523 }
4524 },
4525 ]
4526 );
4527}
4528
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a worktree entry drives the LSP file-operation
    // protocol: the project must send `workspace/willRenameFiles` (and use the
    // WorkspaceEdit the server returns) before the rename, then send the
    // `workspace/didRenameFiles` notification afterwards — but only because the
    // server registered matching file-operation filters for `*.rs` files.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters the fake server will register: all `.rs` files
    // plus all folders, over the `file` scheme.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    // Advertise both willRename and didRename support, so the project should
    // emit the request before the rename and the notification after it.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening the buffer starts the language server for this worktree.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the returned future won't resolve until the server
    // has answered the willRenameFiles request installed below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the fake server returns from willRenameFiles; the test later
    // asserts this exact value was captured, proving the response round-trip.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the handler actually served, so we can assert on it
    // after the rename completes.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe exactly the one.rs -> three.rs move.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename resolves, the server must receive didRenameFiles with
    // the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4657
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end symbol rename: prepare_rename must surface the server's
    // range, and perform_rename must apply the server's multi-file
    // WorkspaceEdit to both affected buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Server advertises rename with prepare support, so prepare_rename goes
    // through the LSP rather than any fallback.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the fake server reports the
    // renameable range as columns 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server answers with edits in both one.rs
    // (the definition) and two.rs (two usages).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction should cover exactly the two edited buffers; check the
    // already-open one.rs buffer first, then the remaining (two.rs) buffer.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4797
4798#[gpui::test]
4799async fn test_search(cx: &mut gpui::TestAppContext) {
4800 init_test(cx);
4801
4802 let fs = FakeFs::new(cx.executor());
4803 fs.insert_tree(
4804 path!("/dir"),
4805 json!({
4806 "one.rs": "const ONE: usize = 1;",
4807 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4808 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4809 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4810 }),
4811 )
4812 .await;
4813 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4814 assert_eq!(
4815 search(
4816 &project,
4817 SearchQuery::text(
4818 "TWO",
4819 false,
4820 true,
4821 false,
4822 Default::default(),
4823 Default::default(),
4824 false,
4825 None
4826 )
4827 .unwrap(),
4828 cx
4829 )
4830 .await
4831 .unwrap(),
4832 HashMap::from_iter([
4833 (separator!("dir/two.rs").to_string(), vec![6..9]),
4834 (separator!("dir/three.rs").to_string(), vec![37..40])
4835 ])
4836 );
4837
4838 let buffer_4 = project
4839 .update(cx, |project, cx| {
4840 project.open_local_buffer(path!("/dir/four.rs"), cx)
4841 })
4842 .await
4843 .unwrap();
4844 buffer_4.update(cx, |buffer, cx| {
4845 let text = "two::TWO";
4846 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4847 });
4848
4849 assert_eq!(
4850 search(
4851 &project,
4852 SearchQuery::text(
4853 "TWO",
4854 false,
4855 true,
4856 false,
4857 Default::default(),
4858 Default::default(),
4859 false,
4860 None,
4861 )
4862 .unwrap(),
4863 cx
4864 )
4865 .await
4866 .unwrap(),
4867 HashMap::from_iter([
4868 (separator!("dir/two.rs").to_string(), vec![6..9]),
4869 (separator!("dir/three.rs").to_string(), vec![37..40]),
4870 (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
4871 ])
4872 );
4873}
4874
4875#[gpui::test]
4876async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4877 init_test(cx);
4878
4879 let search_query = "file";
4880
4881 let fs = FakeFs::new(cx.executor());
4882 fs.insert_tree(
4883 path!("/dir"),
4884 json!({
4885 "one.rs": r#"// Rust file one"#,
4886 "one.ts": r#"// TypeScript file one"#,
4887 "two.rs": r#"// Rust file two"#,
4888 "two.ts": r#"// TypeScript file two"#,
4889 }),
4890 )
4891 .await;
4892 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4893
4894 assert!(
4895 search(
4896 &project,
4897 SearchQuery::text(
4898 search_query,
4899 false,
4900 true,
4901 false,
4902 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4903 Default::default(),
4904 false,
4905 None
4906 )
4907 .unwrap(),
4908 cx
4909 )
4910 .await
4911 .unwrap()
4912 .is_empty(),
4913 "If no inclusions match, no files should be returned"
4914 );
4915
4916 assert_eq!(
4917 search(
4918 &project,
4919 SearchQuery::text(
4920 search_query,
4921 false,
4922 true,
4923 false,
4924 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4925 Default::default(),
4926 false,
4927 None
4928 )
4929 .unwrap(),
4930 cx
4931 )
4932 .await
4933 .unwrap(),
4934 HashMap::from_iter([
4935 (separator!("dir/one.rs").to_string(), vec![8..12]),
4936 (separator!("dir/two.rs").to_string(), vec![8..12]),
4937 ]),
4938 "Rust only search should give only Rust files"
4939 );
4940
4941 assert_eq!(
4942 search(
4943 &project,
4944 SearchQuery::text(
4945 search_query,
4946 false,
4947 true,
4948 false,
4949 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4950 Default::default(),
4951 false,
4952 None,
4953 )
4954 .unwrap(),
4955 cx
4956 )
4957 .await
4958 .unwrap(),
4959 HashMap::from_iter([
4960 (separator!("dir/one.ts").to_string(), vec![14..18]),
4961 (separator!("dir/two.ts").to_string(), vec![14..18]),
4962 ]),
4963 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4964 );
4965
4966 assert_eq!(
4967 search(
4968 &project,
4969 SearchQuery::text(
4970 search_query,
4971 false,
4972 true,
4973 false,
4974 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
4975 .unwrap(),
4976 Default::default(),
4977 false,
4978 None,
4979 )
4980 .unwrap(),
4981 cx
4982 )
4983 .await
4984 .unwrap(),
4985 HashMap::from_iter([
4986 (separator!("dir/two.ts").to_string(), vec![14..18]),
4987 (separator!("dir/one.rs").to_string(), vec![8..12]),
4988 (separator!("dir/one.ts").to_string(), vec![14..18]),
4989 (separator!("dir/two.rs").to_string(), vec![8..12]),
4990 ]),
4991 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4992 );
4993}
4994
4995#[gpui::test]
4996async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4997 init_test(cx);
4998
4999 let search_query = "file";
5000
5001 let fs = FakeFs::new(cx.executor());
5002 fs.insert_tree(
5003 path!("/dir"),
5004 json!({
5005 "one.rs": r#"// Rust file one"#,
5006 "one.ts": r#"// TypeScript file one"#,
5007 "two.rs": r#"// Rust file two"#,
5008 "two.ts": r#"// TypeScript file two"#,
5009 }),
5010 )
5011 .await;
5012 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5013
5014 assert_eq!(
5015 search(
5016 &project,
5017 SearchQuery::text(
5018 search_query,
5019 false,
5020 true,
5021 false,
5022 Default::default(),
5023 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5024 false,
5025 None,
5026 )
5027 .unwrap(),
5028 cx
5029 )
5030 .await
5031 .unwrap(),
5032 HashMap::from_iter([
5033 (separator!("dir/one.rs").to_string(), vec![8..12]),
5034 (separator!("dir/one.ts").to_string(), vec![14..18]),
5035 (separator!("dir/two.rs").to_string(), vec![8..12]),
5036 (separator!("dir/two.ts").to_string(), vec![14..18]),
5037 ]),
5038 "If no exclusions match, all files should be returned"
5039 );
5040
5041 assert_eq!(
5042 search(
5043 &project,
5044 SearchQuery::text(
5045 search_query,
5046 false,
5047 true,
5048 false,
5049 Default::default(),
5050 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5051 false,
5052 None,
5053 )
5054 .unwrap(),
5055 cx
5056 )
5057 .await
5058 .unwrap(),
5059 HashMap::from_iter([
5060 (separator!("dir/one.ts").to_string(), vec![14..18]),
5061 (separator!("dir/two.ts").to_string(), vec![14..18]),
5062 ]),
5063 "Rust exclusion search should give only TypeScript files"
5064 );
5065
5066 assert_eq!(
5067 search(
5068 &project,
5069 SearchQuery::text(
5070 search_query,
5071 false,
5072 true,
5073 false,
5074 Default::default(),
5075 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5076 false,
5077 None,
5078 )
5079 .unwrap(),
5080 cx
5081 )
5082 .await
5083 .unwrap(),
5084 HashMap::from_iter([
5085 (separator!("dir/one.rs").to_string(), vec![8..12]),
5086 (separator!("dir/two.rs").to_string(), vec![8..12]),
5087 ]),
5088 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5089 );
5090
5091 assert!(
5092 search(
5093 &project,
5094 SearchQuery::text(
5095 search_query,
5096 false,
5097 true,
5098 false,
5099 Default::default(),
5100 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5101 .unwrap(),
5102 false,
5103 None,
5104 )
5105 .unwrap(),
5106 cx
5107 )
5108 .await
5109 .unwrap()
5110 .is_empty(),
5111 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5112 );
5113}
5114
5115#[gpui::test]
5116async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5117 init_test(cx);
5118
5119 let search_query = "file";
5120
5121 let fs = FakeFs::new(cx.executor());
5122 fs.insert_tree(
5123 path!("/dir"),
5124 json!({
5125 "one.rs": r#"// Rust file one"#,
5126 "one.ts": r#"// TypeScript file one"#,
5127 "two.rs": r#"// Rust file two"#,
5128 "two.ts": r#"// TypeScript file two"#,
5129 }),
5130 )
5131 .await;
5132 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5133
5134 assert!(
5135 search(
5136 &project,
5137 SearchQuery::text(
5138 search_query,
5139 false,
5140 true,
5141 false,
5142 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5143 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5144 false,
5145 None,
5146 )
5147 .unwrap(),
5148 cx
5149 )
5150 .await
5151 .unwrap()
5152 .is_empty(),
5153 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5154 );
5155
5156 assert!(
5157 search(
5158 &project,
5159 SearchQuery::text(
5160 search_query,
5161 false,
5162 true,
5163 false,
5164 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5165 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5166 false,
5167 None,
5168 )
5169 .unwrap(),
5170 cx
5171 )
5172 .await
5173 .unwrap()
5174 .is_empty(),
5175 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5176 );
5177
5178 assert!(
5179 search(
5180 &project,
5181 SearchQuery::text(
5182 search_query,
5183 false,
5184 true,
5185 false,
5186 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5187 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5188 false,
5189 None,
5190 )
5191 .unwrap(),
5192 cx
5193 )
5194 .await
5195 .unwrap()
5196 .is_empty(),
5197 "Non-matching inclusions and exclusions should not change that."
5198 );
5199
5200 assert_eq!(
5201 search(
5202 &project,
5203 SearchQuery::text(
5204 search_query,
5205 false,
5206 true,
5207 false,
5208 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5209 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5210 false,
5211 None,
5212 )
5213 .unwrap(),
5214 cx
5215 )
5216 .await
5217 .unwrap(),
5218 HashMap::from_iter([
5219 (separator!("dir/one.ts").to_string(), vec![14..18]),
5220 (separator!("dir/two.ts").to_string(), vec![14..18]),
5221 ]),
5222 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5223 );
5224}
5225
5226#[gpui::test]
5227async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
5228 init_test(cx);
5229
5230 let fs = FakeFs::new(cx.executor());
5231 fs.insert_tree(
5232 path!("/worktree-a"),
5233 json!({
5234 "haystack.rs": r#"// NEEDLE"#,
5235 "haystack.ts": r#"// NEEDLE"#,
5236 }),
5237 )
5238 .await;
5239 fs.insert_tree(
5240 path!("/worktree-b"),
5241 json!({
5242 "haystack.rs": r#"// NEEDLE"#,
5243 "haystack.ts": r#"// NEEDLE"#,
5244 }),
5245 )
5246 .await;
5247
5248 let project = Project::test(
5249 fs.clone(),
5250 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
5251 cx,
5252 )
5253 .await;
5254
5255 assert_eq!(
5256 search(
5257 &project,
5258 SearchQuery::text(
5259 "NEEDLE",
5260 false,
5261 true,
5262 false,
5263 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
5264 Default::default(),
5265 true,
5266 None,
5267 )
5268 .unwrap(),
5269 cx
5270 )
5271 .await
5272 .unwrap(),
5273 HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
5274 "should only return results from included worktree"
5275 );
5276 assert_eq!(
5277 search(
5278 &project,
5279 SearchQuery::text(
5280 "NEEDLE",
5281 false,
5282 true,
5283 false,
5284 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
5285 Default::default(),
5286 true,
5287 None,
5288 )
5289 .unwrap(),
5290 cx
5291 )
5292 .await
5293 .unwrap(),
5294 HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
5295 "should only return results from included worktree"
5296 );
5297
5298 assert_eq!(
5299 search(
5300 &project,
5301 SearchQuery::text(
5302 "NEEDLE",
5303 false,
5304 true,
5305 false,
5306 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5307 Default::default(),
5308 false,
5309 None,
5310 )
5311 .unwrap(),
5312 cx
5313 )
5314 .await
5315 .unwrap(),
5316 HashMap::from_iter([
5317 (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
5318 (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
5319 ]),
5320 "should return results from both worktrees"
5321 );
5322}
5323
5324#[gpui::test]
5325async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
5326 init_test(cx);
5327
5328 let fs = FakeFs::new(cx.background_executor.clone());
5329 fs.insert_tree(
5330 path!("/dir"),
5331 json!({
5332 ".git": {},
5333 ".gitignore": "**/target\n/node_modules\n",
5334 "target": {
5335 "index.txt": "index_key:index_value"
5336 },
5337 "node_modules": {
5338 "eslint": {
5339 "index.ts": "const eslint_key = 'eslint value'",
5340 "package.json": r#"{ "some_key": "some value" }"#,
5341 },
5342 "prettier": {
5343 "index.ts": "const prettier_key = 'prettier value'",
5344 "package.json": r#"{ "other_key": "other value" }"#,
5345 },
5346 },
5347 "package.json": r#"{ "main_key": "main value" }"#,
5348 }),
5349 )
5350 .await;
5351 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5352
5353 let query = "key";
5354 assert_eq!(
5355 search(
5356 &project,
5357 SearchQuery::text(
5358 query,
5359 false,
5360 false,
5361 false,
5362 Default::default(),
5363 Default::default(),
5364 false,
5365 None,
5366 )
5367 .unwrap(),
5368 cx
5369 )
5370 .await
5371 .unwrap(),
5372 HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
5373 "Only one non-ignored file should have the query"
5374 );
5375
5376 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5377 assert_eq!(
5378 search(
5379 &project,
5380 SearchQuery::text(
5381 query,
5382 false,
5383 false,
5384 true,
5385 Default::default(),
5386 Default::default(),
5387 false,
5388 None,
5389 )
5390 .unwrap(),
5391 cx
5392 )
5393 .await
5394 .unwrap(),
5395 HashMap::from_iter([
5396 (separator!("dir/package.json").to_string(), vec![8..11]),
5397 (separator!("dir/target/index.txt").to_string(), vec![6..9]),
5398 (
5399 separator!("dir/node_modules/prettier/package.json").to_string(),
5400 vec![9..12]
5401 ),
5402 (
5403 separator!("dir/node_modules/prettier/index.ts").to_string(),
5404 vec![15..18]
5405 ),
5406 (
5407 separator!("dir/node_modules/eslint/index.ts").to_string(),
5408 vec![13..16]
5409 ),
5410 (
5411 separator!("dir/node_modules/eslint/package.json").to_string(),
5412 vec![8..11]
5413 ),
5414 ]),
5415 "Unrestricted search with ignored directories should find every file with the query"
5416 );
5417
5418 let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
5419 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
5420 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5421 assert_eq!(
5422 search(
5423 &project,
5424 SearchQuery::text(
5425 query,
5426 false,
5427 false,
5428 true,
5429 files_to_include,
5430 files_to_exclude,
5431 false,
5432 None,
5433 )
5434 .unwrap(),
5435 cx
5436 )
5437 .await
5438 .unwrap(),
5439 HashMap::from_iter([(
5440 separator!("dir/node_modules/prettier/package.json").to_string(),
5441 vec![9..12]
5442 )]),
5443 "With search including ignored prettier directory and excluding TS files, only one file should be found"
5444 );
5445}
5446
5447#[gpui::test]
5448async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
5449 init_test(cx);
5450
5451 let fs = FakeFs::new(cx.executor());
5452 fs.insert_tree(
5453 path!("/dir"),
5454 json!({
5455 "one.rs": "// ПРИВЕТ? привет!",
5456 "two.rs": "// ПРИВЕТ.",
5457 "three.rs": "// привет",
5458 }),
5459 )
5460 .await;
5461 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5462
5463 let unicode_case_sensitive_query = SearchQuery::text(
5464 "привет",
5465 false,
5466 true,
5467 false,
5468 Default::default(),
5469 Default::default(),
5470 false,
5471 None,
5472 );
5473 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
5474 assert_eq!(
5475 search(&project, unicode_case_sensitive_query.unwrap(), cx)
5476 .await
5477 .unwrap(),
5478 HashMap::from_iter([
5479 (separator!("dir/one.rs").to_string(), vec![17..29]),
5480 (separator!("dir/three.rs").to_string(), vec![3..15]),
5481 ])
5482 );
5483
5484 let unicode_case_insensitive_query = SearchQuery::text(
5485 "привет",
5486 false,
5487 false,
5488 false,
5489 Default::default(),
5490 Default::default(),
5491 false,
5492 None,
5493 );
5494 assert_matches!(
5495 unicode_case_insensitive_query,
5496 Ok(SearchQuery::Regex { .. })
5497 );
5498 assert_eq!(
5499 search(&project, unicode_case_insensitive_query.unwrap(), cx)
5500 .await
5501 .unwrap(),
5502 HashMap::from_iter([
5503 (separator!("dir/one.rs").to_string(), vec![3..15, 17..29]),
5504 (separator!("dir/two.rs").to_string(), vec![3..15]),
5505 (separator!("dir/three.rs").to_string(), vec![3..15]),
5506 ])
5507 );
5508
5509 assert_eq!(
5510 search(
5511 &project,
5512 SearchQuery::text(
5513 "привет.",
5514 false,
5515 false,
5516 false,
5517 Default::default(),
5518 Default::default(),
5519 false,
5520 None,
5521 )
5522 .unwrap(),
5523 cx
5524 )
5525 .await
5526 .unwrap(),
5527 HashMap::from_iter([(separator!("dir/two.rs").to_string(), vec![3..16]),])
5528 );
5529}
5530
5531#[gpui::test]
5532async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5533 init_test(cx);
5534
5535 let fs = FakeFs::new(cx.executor().clone());
5536 fs.insert_tree(
5537 "/one/two",
5538 json!({
5539 "three": {
5540 "a.txt": "",
5541 "four": {}
5542 },
5543 "c.rs": ""
5544 }),
5545 )
5546 .await;
5547
5548 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5549 project
5550 .update(cx, |project, cx| {
5551 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5552 project.create_entry((id, "b.."), true, cx)
5553 })
5554 .await
5555 .unwrap()
5556 .to_included()
5557 .unwrap();
5558
5559 // Can't create paths outside the project
5560 let result = project
5561 .update(cx, |project, cx| {
5562 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5563 project.create_entry((id, "../../boop"), true, cx)
5564 })
5565 .await;
5566 assert!(result.is_err());
5567
5568 // Can't create paths with '..'
5569 let result = project
5570 .update(cx, |project, cx| {
5571 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5572 project.create_entry((id, "four/../beep"), true, cx)
5573 })
5574 .await;
5575 assert!(result.is_err());
5576
5577 assert_eq!(
5578 fs.paths(true),
5579 vec![
5580 PathBuf::from(path!("/")),
5581 PathBuf::from(path!("/one")),
5582 PathBuf::from(path!("/one/two")),
5583 PathBuf::from(path!("/one/two/c.rs")),
5584 PathBuf::from(path!("/one/two/three")),
5585 PathBuf::from(path!("/one/two/three/a.txt")),
5586 PathBuf::from(path!("/one/two/three/b..")),
5587 PathBuf::from(path!("/one/two/three/four")),
5588 ]
5589 );
5590
5591 // And we cannot open buffers with '..'
5592 let result = project
5593 .update(cx, |project, cx| {
5594 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5595 project.open_buffer((id, "../c.rs"), cx)
5596 })
5597 .await;
5598 assert!(result.is_err())
5599}
5600
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Runs four fake language servers against one buffer and checks that a
    // hover query fans out only to servers that advertise hover capabilities,
    // merges the non-empty responses, and never queries a server without the
    // capability.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four servers for the same language: two will answer hovers, one answers
    // None, and one has no hover capability at all.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wait for all four servers to start and install a hover handler on each
    // that has the capability; keep the handler streams so we can later assert
    // each one was actually invoked.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two return a real hover payload derived from their name.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // This one is queried but contributes nothing.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // This server must never receive a hover request, since it did not
            // advertise the hover capability.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue the hover, then drive each capable server's handler to completion
    // to prove all of them were queried.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned Some(...) contribute hover blocks;
    // ESLintServer's None response is dropped.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5754
5755#[gpui::test]
5756async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5757 init_test(cx);
5758
5759 let fs = FakeFs::new(cx.executor());
5760 fs.insert_tree(
5761 path!("/dir"),
5762 json!({
5763 "a.ts": "a",
5764 }),
5765 )
5766 .await;
5767
5768 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5769
5770 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5771 language_registry.add(typescript_lang());
5772 let mut fake_language_servers = language_registry.register_fake_lsp(
5773 "TypeScript",
5774 FakeLspAdapter {
5775 capabilities: lsp::ServerCapabilities {
5776 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5777 ..lsp::ServerCapabilities::default()
5778 },
5779 ..FakeLspAdapter::default()
5780 },
5781 );
5782
5783 let (buffer, _handle) = project
5784 .update(cx, |p, cx| {
5785 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5786 })
5787 .await
5788 .unwrap();
5789 cx.executor().run_until_parked();
5790
5791 let fake_server = fake_language_servers
5792 .next()
5793 .await
5794 .expect("failed to get the language server");
5795
5796 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5797 move |_, _| async move {
5798 Ok(Some(lsp::Hover {
5799 contents: lsp::HoverContents::Array(vec![
5800 lsp::MarkedString::String("".to_string()),
5801 lsp::MarkedString::String(" ".to_string()),
5802 lsp::MarkedString::String("\n\n\n".to_string()),
5803 ]),
5804 range: None,
5805 }))
5806 },
5807 );
5808
5809 let hover_task = project.update(cx, |project, cx| {
5810 project.hover(&buffer, Point::new(0, 0), cx)
5811 });
5812 let () = request_handled
5813 .next()
5814 .await
5815 .expect("All hover requests should have been triggered");
5816 assert_eq!(
5817 Vec::<String>::new(),
5818 hover_task
5819 .await
5820 .into_iter()
5821 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5822 .sorted()
5823 .collect::<Vec<_>>(),
5824 "Empty hover parts should be ignored"
5825 );
5826}
5827
5828#[gpui::test]
5829async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5830 init_test(cx);
5831
5832 let fs = FakeFs::new(cx.executor());
5833 fs.insert_tree(
5834 path!("/dir"),
5835 json!({
5836 "a.ts": "a",
5837 }),
5838 )
5839 .await;
5840
5841 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5842
5843 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5844 language_registry.add(typescript_lang());
5845 let mut fake_language_servers = language_registry.register_fake_lsp(
5846 "TypeScript",
5847 FakeLspAdapter {
5848 capabilities: lsp::ServerCapabilities {
5849 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5850 ..lsp::ServerCapabilities::default()
5851 },
5852 ..FakeLspAdapter::default()
5853 },
5854 );
5855
5856 let (buffer, _handle) = project
5857 .update(cx, |p, cx| {
5858 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5859 })
5860 .await
5861 .unwrap();
5862 cx.executor().run_until_parked();
5863
5864 let fake_server = fake_language_servers
5865 .next()
5866 .await
5867 .expect("failed to get the language server");
5868
5869 let mut request_handled = fake_server
5870 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
5871 Ok(Some(vec![
5872 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5873 title: "organize imports".to_string(),
5874 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5875 ..lsp::CodeAction::default()
5876 }),
5877 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5878 title: "fix code".to_string(),
5879 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5880 ..lsp::CodeAction::default()
5881 }),
5882 ]))
5883 });
5884
5885 let code_actions_task = project.update(cx, |project, cx| {
5886 project.code_actions(
5887 &buffer,
5888 0..buffer.read(cx).len(),
5889 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5890 cx,
5891 )
5892 });
5893
5894 let () = request_handled
5895 .next()
5896 .await
5897 .expect("The code action request should have been triggered");
5898
5899 let code_actions = code_actions_task.await.unwrap();
5900 assert_eq!(code_actions.len(), 1);
5901 assert_eq!(
5902 code_actions[0].lsp_action.action_kind(),
5903 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5904 );
5905}
5906
5907#[gpui::test]
5908async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5909 init_test(cx);
5910
5911 let fs = FakeFs::new(cx.executor());
5912 fs.insert_tree(
5913 path!("/dir"),
5914 json!({
5915 "a.tsx": "a",
5916 }),
5917 )
5918 .await;
5919
5920 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5921
5922 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5923 language_registry.add(tsx_lang());
5924 let language_server_names = [
5925 "TypeScriptServer",
5926 "TailwindServer",
5927 "ESLintServer",
5928 "NoActionsCapabilitiesServer",
5929 ];
5930
5931 let mut language_server_rxs = [
5932 language_registry.register_fake_lsp(
5933 "tsx",
5934 FakeLspAdapter {
5935 name: language_server_names[0],
5936 capabilities: lsp::ServerCapabilities {
5937 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5938 ..lsp::ServerCapabilities::default()
5939 },
5940 ..FakeLspAdapter::default()
5941 },
5942 ),
5943 language_registry.register_fake_lsp(
5944 "tsx",
5945 FakeLspAdapter {
5946 name: language_server_names[1],
5947 capabilities: lsp::ServerCapabilities {
5948 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5949 ..lsp::ServerCapabilities::default()
5950 },
5951 ..FakeLspAdapter::default()
5952 },
5953 ),
5954 language_registry.register_fake_lsp(
5955 "tsx",
5956 FakeLspAdapter {
5957 name: language_server_names[2],
5958 capabilities: lsp::ServerCapabilities {
5959 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5960 ..lsp::ServerCapabilities::default()
5961 },
5962 ..FakeLspAdapter::default()
5963 },
5964 ),
5965 language_registry.register_fake_lsp(
5966 "tsx",
5967 FakeLspAdapter {
5968 name: language_server_names[3],
5969 capabilities: lsp::ServerCapabilities {
5970 code_action_provider: None,
5971 ..lsp::ServerCapabilities::default()
5972 },
5973 ..FakeLspAdapter::default()
5974 },
5975 ),
5976 ];
5977
5978 let (buffer, _handle) = project
5979 .update(cx, |p, cx| {
5980 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5981 })
5982 .await
5983 .unwrap();
5984 cx.executor().run_until_parked();
5985
5986 let mut servers_with_actions_requests = HashMap::default();
5987 for i in 0..language_server_names.len() {
5988 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5989 panic!(
5990 "Failed to get language server #{i} with name {}",
5991 &language_server_names[i]
5992 )
5993 });
5994 let new_server_name = new_server.server.name();
5995
5996 assert!(
5997 !servers_with_actions_requests.contains_key(&new_server_name),
5998 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5999 );
6000 match new_server_name.0.as_ref() {
6001 "TailwindServer" | "TypeScriptServer" => {
6002 servers_with_actions_requests.insert(
6003 new_server_name.clone(),
6004 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6005 move |_, _| {
6006 let name = new_server_name.clone();
6007 async move {
6008 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6009 lsp::CodeAction {
6010 title: format!("{name} code action"),
6011 ..lsp::CodeAction::default()
6012 },
6013 )]))
6014 }
6015 },
6016 ),
6017 );
6018 }
6019 "ESLintServer" => {
6020 servers_with_actions_requests.insert(
6021 new_server_name,
6022 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6023 |_, _| async move { Ok(None) },
6024 ),
6025 );
6026 }
6027 "NoActionsCapabilitiesServer" => {
6028 let _never_handled = new_server
6029 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6030 panic!(
6031 "Should not call for code actions server with no corresponding capabilities"
6032 )
6033 });
6034 }
6035 unexpected => panic!("Unexpected server name: {unexpected}"),
6036 }
6037 }
6038
6039 let code_actions_task = project.update(cx, |project, cx| {
6040 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6041 });
6042
6043 // cx.run_until_parked();
6044 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6045 |mut code_actions_request| async move {
6046 code_actions_request
6047 .next()
6048 .await
6049 .expect("All code actions requests should have been triggered")
6050 },
6051 ))
6052 .await;
6053 assert_eq!(
6054 vec!["TailwindServer code action", "TypeScriptServer code action"],
6055 code_actions_task
6056 .await
6057 .unwrap()
6058 .into_iter()
6059 .map(|code_action| code_action.lsp_action.title().to_owned())
6060 .sorted()
6061 .collect::<Vec<_>>(),
6062 "Should receive code actions responses from all related servers with hover capabilities"
6063 );
6064}
6065
6066#[gpui::test]
6067async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6068 init_test(cx);
6069
6070 let fs = FakeFs::new(cx.executor());
6071 fs.insert_tree(
6072 "/dir",
6073 json!({
6074 "a.rs": "let a = 1;",
6075 "b.rs": "let b = 2;",
6076 "c.rs": "let c = 2;",
6077 }),
6078 )
6079 .await;
6080
6081 let project = Project::test(
6082 fs,
6083 [
6084 "/dir/a.rs".as_ref(),
6085 "/dir/b.rs".as_ref(),
6086 "/dir/c.rs".as_ref(),
6087 ],
6088 cx,
6089 )
6090 .await;
6091
6092 // check the initial state and get the worktrees
6093 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6094 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6095 assert_eq!(worktrees.len(), 3);
6096
6097 let worktree_a = worktrees[0].read(cx);
6098 let worktree_b = worktrees[1].read(cx);
6099 let worktree_c = worktrees[2].read(cx);
6100
6101 // check they start in the right order
6102 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6103 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6104 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6105
6106 (
6107 worktrees[0].clone(),
6108 worktrees[1].clone(),
6109 worktrees[2].clone(),
6110 )
6111 });
6112
6113 // move first worktree to after the second
6114 // [a, b, c] -> [b, a, c]
6115 project
6116 .update(cx, |project, cx| {
6117 let first = worktree_a.read(cx);
6118 let second = worktree_b.read(cx);
6119 project.move_worktree(first.id(), second.id(), cx)
6120 })
6121 .expect("moving first after second");
6122
6123 // check the state after moving
6124 project.update(cx, |project, cx| {
6125 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6126 assert_eq!(worktrees.len(), 3);
6127
6128 let first = worktrees[0].read(cx);
6129 let second = worktrees[1].read(cx);
6130 let third = worktrees[2].read(cx);
6131
6132 // check they are now in the right order
6133 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6134 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6135 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6136 });
6137
6138 // move the second worktree to before the first
6139 // [b, a, c] -> [a, b, c]
6140 project
6141 .update(cx, |project, cx| {
6142 let second = worktree_a.read(cx);
6143 let first = worktree_b.read(cx);
6144 project.move_worktree(first.id(), second.id(), cx)
6145 })
6146 .expect("moving second before first");
6147
6148 // check the state after moving
6149 project.update(cx, |project, cx| {
6150 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6151 assert_eq!(worktrees.len(), 3);
6152
6153 let first = worktrees[0].read(cx);
6154 let second = worktrees[1].read(cx);
6155 let third = worktrees[2].read(cx);
6156
6157 // check they are now in the right order
6158 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6159 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6160 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6161 });
6162
6163 // move the second worktree to after the third
6164 // [a, b, c] -> [a, c, b]
6165 project
6166 .update(cx, |project, cx| {
6167 let second = worktree_b.read(cx);
6168 let third = worktree_c.read(cx);
6169 project.move_worktree(second.id(), third.id(), cx)
6170 })
6171 .expect("moving second after third");
6172
6173 // check the state after moving
6174 project.update(cx, |project, cx| {
6175 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6176 assert_eq!(worktrees.len(), 3);
6177
6178 let first = worktrees[0].read(cx);
6179 let second = worktrees[1].read(cx);
6180 let third = worktrees[2].read(cx);
6181
6182 // check they are now in the right order
6183 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6184 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6185 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6186 });
6187
6188 // move the third worktree to before the second
6189 // [a, c, b] -> [a, b, c]
6190 project
6191 .update(cx, |project, cx| {
6192 let third = worktree_c.read(cx);
6193 let second = worktree_b.read(cx);
6194 project.move_worktree(third.id(), second.id(), cx)
6195 })
6196 .expect("moving third before second");
6197
6198 // check the state after moving
6199 project.update(cx, |project, cx| {
6200 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6201 assert_eq!(worktrees.len(), 3);
6202
6203 let first = worktrees[0].read(cx);
6204 let second = worktrees[1].read(cx);
6205 let third = worktrees[2].read(cx);
6206
6207 // check they are now in the right order
6208 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6209 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6210 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6211 });
6212
6213 // move the first worktree to after the third
6214 // [a, b, c] -> [b, c, a]
6215 project
6216 .update(cx, |project, cx| {
6217 let first = worktree_a.read(cx);
6218 let third = worktree_c.read(cx);
6219 project.move_worktree(first.id(), third.id(), cx)
6220 })
6221 .expect("moving first after third");
6222
6223 // check the state after moving
6224 project.update(cx, |project, cx| {
6225 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6226 assert_eq!(worktrees.len(), 3);
6227
6228 let first = worktrees[0].read(cx);
6229 let second = worktrees[1].read(cx);
6230 let third = worktrees[2].read(cx);
6231
6232 // check they are now in the right order
6233 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6234 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6235 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6236 });
6237
6238 // move the third worktree to before the first
6239 // [b, c, a] -> [a, b, c]
6240 project
6241 .update(cx, |project, cx| {
6242 let third = worktree_a.read(cx);
6243 let first = worktree_b.read(cx);
6244 project.move_worktree(third.id(), first.id(), cx)
6245 })
6246 .expect("moving third before first");
6247
6248 // check the state after moving
6249 project.update(cx, |project, cx| {
6250 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6251 assert_eq!(worktrees.len(), 3);
6252
6253 let first = worktrees[0].read(cx);
6254 let second = worktrees[1].read(cx);
6255 let third = worktrees[2].read(cx);
6256
6257 // check they are now in the right order
6258 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6259 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6260 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6261 });
6262}
6263
6264#[gpui::test]
6265async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
6266 init_test(cx);
6267
6268 let staged_contents = r#"
6269 fn main() {
6270 println!("hello world");
6271 }
6272 "#
6273 .unindent();
6274 let file_contents = r#"
6275 // print goodbye
6276 fn main() {
6277 println!("goodbye world");
6278 }
6279 "#
6280 .unindent();
6281
6282 let fs = FakeFs::new(cx.background_executor.clone());
6283 fs.insert_tree(
6284 "/dir",
6285 json!({
6286 ".git": {},
6287 "src": {
6288 "main.rs": file_contents,
6289 }
6290 }),
6291 )
6292 .await;
6293
6294 fs.set_index_for_repo(
6295 Path::new("/dir/.git"),
6296 &[("src/main.rs".into(), staged_contents)],
6297 );
6298
6299 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6300
6301 let buffer = project
6302 .update(cx, |project, cx| {
6303 project.open_local_buffer("/dir/src/main.rs", cx)
6304 })
6305 .await
6306 .unwrap();
6307 let unstaged_diff = project
6308 .update(cx, |project, cx| {
6309 project.open_unstaged_diff(buffer.clone(), cx)
6310 })
6311 .await
6312 .unwrap();
6313
6314 cx.run_until_parked();
6315 unstaged_diff.update(cx, |unstaged_diff, cx| {
6316 let snapshot = buffer.read(cx).snapshot();
6317 assert_hunks(
6318 unstaged_diff.hunks(&snapshot, cx),
6319 &snapshot,
6320 &unstaged_diff.base_text_string().unwrap(),
6321 &[
6322 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
6323 (
6324 2..3,
6325 " println!(\"hello world\");\n",
6326 " println!(\"goodbye world\");\n",
6327 DiffHunkStatus::modified_none(),
6328 ),
6329 ],
6330 );
6331 });
6332
6333 let staged_contents = r#"
6334 // print goodbye
6335 fn main() {
6336 }
6337 "#
6338 .unindent();
6339
6340 fs.set_index_for_repo(
6341 Path::new("/dir/.git"),
6342 &[("src/main.rs".into(), staged_contents)],
6343 );
6344
6345 cx.run_until_parked();
6346 unstaged_diff.update(cx, |unstaged_diff, cx| {
6347 let snapshot = buffer.read(cx).snapshot();
6348 assert_hunks(
6349 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
6350 &snapshot,
6351 &unstaged_diff.base_text().text(),
6352 &[(
6353 2..3,
6354 "",
6355 " println!(\"goodbye world\");\n",
6356 DiffHunkStatus::added_none(),
6357 )],
6358 );
6359 });
6360}
6361
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // An uncommitted diff compares the buffer against HEAD, while the hunks'
    // secondary status reflects whether the change is also present in the git
    // index (i.e. staged). Covers: a modified file, resetting HEAD, a deleted
    // file, and staging the deletion.
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // Both HEAD and the index also track `deletion.rs`, which is absent from
    // the working tree.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should adopt the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                // The added comment line is not in the index, so it carries a
                // secondary (unstaged) hunk; the println change is staged.
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is still in the index, so the hunk has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file (by removing it from the index entries).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6539
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    // Staging a hunk is optimistic: the hunk immediately shows a pending
    // status, then settles to staged once the index write completes — or rolls
    // back to unstaged if the write fails. This test also checks the
    // `BufferDiffEvent`s emitted along the way.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The staged hunk is pending while the index write is in flight.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It still shows as pending until the write resolves.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback is reported as a diff change covering the whole file.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6879
6880#[gpui::test(seeds(340, 472))]
6881async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
6882 use DiffHunkSecondaryStatus::*;
6883 init_test(cx);
6884
6885 let committed_contents = r#"
6886 zero
6887 one
6888 two
6889 three
6890 four
6891 five
6892 "#
6893 .unindent();
6894 let file_contents = r#"
6895 one
6896 TWO
6897 three
6898 FOUR
6899 five
6900 "#
6901 .unindent();
6902
6903 let fs = FakeFs::new(cx.background_executor.clone());
6904 fs.insert_tree(
6905 "/dir",
6906 json!({
6907 ".git": {},
6908 "file.txt": file_contents.clone()
6909 }),
6910 )
6911 .await;
6912
6913 fs.set_head_for_repo(
6914 "/dir/.git".as_ref(),
6915 &[("file.txt".into(), committed_contents.clone())],
6916 );
6917 fs.set_index_for_repo(
6918 "/dir/.git".as_ref(),
6919 &[("file.txt".into(), committed_contents.clone())],
6920 );
6921
6922 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6923
6924 let buffer = project
6925 .update(cx, |project, cx| {
6926 project.open_local_buffer("/dir/file.txt", cx)
6927 })
6928 .await
6929 .unwrap();
6930 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6931 let uncommitted_diff = project
6932 .update(cx, |project, cx| {
6933 project.open_uncommitted_diff(buffer.clone(), cx)
6934 })
6935 .await
6936 .unwrap();
6937
6938 // The hunks are initially unstaged.
6939 uncommitted_diff.read_with(cx, |diff, cx| {
6940 assert_hunks(
6941 diff.hunks(&snapshot, cx),
6942 &snapshot,
6943 &diff.base_text_string().unwrap(),
6944 &[
6945 (
6946 0..0,
6947 "zero\n",
6948 "",
6949 DiffHunkStatus::deleted(HasSecondaryHunk),
6950 ),
6951 (
6952 1..2,
6953 "two\n",
6954 "TWO\n",
6955 DiffHunkStatus::modified(HasSecondaryHunk),
6956 ),
6957 (
6958 3..4,
6959 "four\n",
6960 "FOUR\n",
6961 DiffHunkStatus::modified(HasSecondaryHunk),
6962 ),
6963 ],
6964 );
6965 });
6966
6967 // Pause IO events
6968 fs.pause_events();
6969
6970 // Stage the first hunk.
6971 uncommitted_diff.update(cx, |diff, cx| {
6972 let hunk = diff.hunks(&snapshot, cx).next().unwrap();
6973 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
6974 assert_hunks(
6975 diff.hunks(&snapshot, cx),
6976 &snapshot,
6977 &diff.base_text_string().unwrap(),
6978 &[
6979 (
6980 0..0,
6981 "zero\n",
6982 "",
6983 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
6984 ),
6985 (
6986 1..2,
6987 "two\n",
6988 "TWO\n",
6989 DiffHunkStatus::modified(HasSecondaryHunk),
6990 ),
6991 (
6992 3..4,
6993 "four\n",
6994 "FOUR\n",
6995 DiffHunkStatus::modified(HasSecondaryHunk),
6996 ),
6997 ],
6998 );
6999 });
7000
7001 // Stage the second hunk *before* receiving the FS event for the first hunk.
7002 cx.run_until_parked();
7003 uncommitted_diff.update(cx, |diff, cx| {
7004 let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
7005 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
7006 assert_hunks(
7007 diff.hunks(&snapshot, cx),
7008 &snapshot,
7009 &diff.base_text_string().unwrap(),
7010 &[
7011 (
7012 0..0,
7013 "zero\n",
7014 "",
7015 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7016 ),
7017 (
7018 1..2,
7019 "two\n",
7020 "TWO\n",
7021 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7022 ),
7023 (
7024 3..4,
7025 "four\n",
7026 "FOUR\n",
7027 DiffHunkStatus::modified(HasSecondaryHunk),
7028 ),
7029 ],
7030 );
7031 });
7032
7033 // Process the FS event for staging the first hunk (second event is still pending).
7034 fs.flush_events(1);
7035 cx.run_until_parked();
7036
7037 // Stage the third hunk before receiving the second FS event.
7038 uncommitted_diff.update(cx, |diff, cx| {
7039 let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
7040 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
7041 });
7042
7043 // Wait for all remaining IO.
7044 cx.run_until_parked();
7045 fs.flush_events(fs.buffered_event_count());
7046
7047 // Now all hunks are staged.
7048 cx.run_until_parked();
7049 uncommitted_diff.update(cx, |diff, cx| {
7050 assert_hunks(
7051 diff.hunks(&snapshot, cx),
7052 &snapshot,
7053 &diff.base_text_string().unwrap(),
7054 &[
7055 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
7056 (
7057 1..2,
7058 "two\n",
7059 "TWO\n",
7060 DiffHunkStatus::modified(NoSecondaryHunk),
7061 ),
7062 (
7063 3..4,
7064 "four\n",
7065 "FOUR\n",
7066 DiffHunkStatus::modified(NoSecondaryHunk),
7067 ),
7068 ],
7069 );
7070 });
7071}
7072
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Fuzz test: repeatedly stage or unstage randomly-chosen hunks with
    // random delays in between, then verify that the optimistic "pending"
    // statuses converge to the expected final states once all index writes
    // and diff recalculations have settled.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.gen_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line of the buffer is modified, producing 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of the expected state: each operation
    // below also records its optimistic status on the corresponding entry.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.gen_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the chosen hunk: currently-unstaged hunks get staged,
        // staged hunks get unstaged.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times so operations interleave with
        // in-flight index writes and diff recalculations.
        for _ in 0..rng.gen_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything has settled, each pending status should have resolved
    // in the direction of the requested operation.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    // Compare the real hunk statuses against the model.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7190
7191#[gpui::test]
7192async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7193 init_test(cx);
7194
7195 let committed_contents = r#"
7196 fn main() {
7197 println!("hello from HEAD");
7198 }
7199 "#
7200 .unindent();
7201 let file_contents = r#"
7202 fn main() {
7203 println!("hello from the working copy");
7204 }
7205 "#
7206 .unindent();
7207
7208 let fs = FakeFs::new(cx.background_executor.clone());
7209 fs.insert_tree(
7210 "/dir",
7211 json!({
7212 ".git": {},
7213 "src": {
7214 "main.rs": file_contents,
7215 }
7216 }),
7217 )
7218 .await;
7219
7220 fs.set_head_for_repo(
7221 Path::new("/dir/.git"),
7222 &[("src/main.rs".into(), committed_contents.clone())],
7223 );
7224 fs.set_index_for_repo(
7225 Path::new("/dir/.git"),
7226 &[("src/main.rs".into(), committed_contents.clone())],
7227 );
7228
7229 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7230
7231 let buffer = project
7232 .update(cx, |project, cx| {
7233 project.open_local_buffer("/dir/src/main.rs", cx)
7234 })
7235 .await
7236 .unwrap();
7237 let uncommitted_diff = project
7238 .update(cx, |project, cx| {
7239 project.open_uncommitted_diff(buffer.clone(), cx)
7240 })
7241 .await
7242 .unwrap();
7243
7244 cx.run_until_parked();
7245 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7246 let snapshot = buffer.read(cx).snapshot();
7247 assert_hunks(
7248 uncommitted_diff.hunks(&snapshot, cx),
7249 &snapshot,
7250 &uncommitted_diff.base_text_string().unwrap(),
7251 &[(
7252 1..2,
7253 " println!(\"hello from HEAD\");\n",
7254 " println!(\"hello from the working copy\");\n",
7255 DiffHunkStatus {
7256 kind: DiffHunkStatusKind::Modified,
7257 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7258 },
7259 )],
7260 );
7261 });
7262}
7263
7264#[gpui::test]
7265async fn test_repository_and_path_for_project_path(
7266 background_executor: BackgroundExecutor,
7267 cx: &mut gpui::TestAppContext,
7268) {
7269 init_test(cx);
7270 let fs = FakeFs::new(background_executor);
7271 fs.insert_tree(
7272 path!("/root"),
7273 json!({
7274 "c.txt": "",
7275 "dir1": {
7276 ".git": {},
7277 "deps": {
7278 "dep1": {
7279 ".git": {},
7280 "src": {
7281 "a.txt": ""
7282 }
7283 }
7284 },
7285 "src": {
7286 "b.txt": ""
7287 }
7288 },
7289 }),
7290 )
7291 .await;
7292
7293 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7294 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7295 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7296 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7297 .await;
7298 cx.run_until_parked();
7299
7300 project.read_with(cx, |project, cx| {
7301 let git_store = project.git_store().read(cx);
7302 let pairs = [
7303 ("c.txt", None),
7304 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
7305 (
7306 "dir1/deps/dep1/src/a.txt",
7307 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
7308 ),
7309 ];
7310 let expected = pairs
7311 .iter()
7312 .map(|(path, result)| {
7313 (
7314 path,
7315 result.map(|(repo, repo_path)| {
7316 (Path::new(repo).into(), RepoPath::from(repo_path))
7317 }),
7318 )
7319 })
7320 .collect::<Vec<_>>();
7321 let actual = pairs
7322 .iter()
7323 .map(|(path, _)| {
7324 let project_path = (tree_id, Path::new(path)).into();
7325 let result = maybe!({
7326 let (repo, repo_path) =
7327 git_store.repository_and_path_for_project_path(&project_path, cx)?;
7328 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
7329 });
7330 (path, result)
7331 })
7332 .collect::<Vec<_>>();
7333 pretty_assertions::assert_eq!(expected, actual);
7334 });
7335
7336 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
7337 .await
7338 .unwrap();
7339 cx.run_until_parked();
7340
7341 project.read_with(cx, |project, cx| {
7342 let git_store = project.git_store().read(cx);
7343 assert_eq!(
7344 git_store.repository_and_path_for_project_path(
7345 &(tree_id, Path::new("dir1/src/b.txt")).into(),
7346 cx
7347 ),
7348 None
7349 );
7350 });
7351}
7352
7353#[gpui::test]
7354async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7355 init_test(cx);
7356 let fs = FakeFs::new(cx.background_executor.clone());
7357 fs.insert_tree(
7358 path!("/root"),
7359 json!({
7360 "home": {
7361 ".git": {},
7362 "project": {
7363 "a.txt": "A"
7364 },
7365 },
7366 }),
7367 )
7368 .await;
7369 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7370
7371 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7372 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7373 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7374 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7375 .await;
7376 tree.flush_fs_events(cx).await;
7377
7378 project.read_with(cx, |project, cx| {
7379 let containing = project
7380 .git_store()
7381 .read(cx)
7382 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7383 assert!(containing.is_none());
7384 });
7385
7386 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7387 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7388 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7389 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7390 .await;
7391 tree.flush_fs_events(cx).await;
7392
7393 project.read_with(cx, |project, cx| {
7394 let containing = project
7395 .git_store()
7396 .read(cx)
7397 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7398 assert_eq!(
7399 containing
7400 .unwrap()
7401 .0
7402 .read(cx)
7403 .work_directory_abs_path
7404 .as_ref(),
7405 Path::new(path!("/root/home"))
7406 );
7407 });
7408}
7409
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    // End-to-end check against a real git repository on disk: file statuses
    // are detected at startup and kept up to date as the working copy and
    // the index change.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged file; its status should now appear.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Stage the modifications, drop d.txt from the index, and commit;
    // tracked statuses should clear after this.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7535
7536#[gpui::test]
7537async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
7538 init_test(cx);
7539 cx.executor().allow_parking();
7540
7541 let root = TempTree::new(json!({
7542 "project": {
7543 "sub": {},
7544 "a.txt": "",
7545 },
7546 }));
7547
7548 let work_dir = root.path().join("project");
7549 let repo = git_init(work_dir.as_path());
7550 // a.txt exists in HEAD and the working copy but is deleted in the index.
7551 git_add("a.txt", &repo);
7552 git_commit("Initial commit", &repo);
7553 git_remove_index("a.txt".as_ref(), &repo);
7554 // `sub` is a nested git repository.
7555 let _sub = git_init(&work_dir.join("sub"));
7556
7557 let project = Project::test(
7558 Arc::new(RealFs::new(None, cx.executor())),
7559 [root.path()],
7560 cx,
7561 )
7562 .await;
7563
7564 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7565 tree.flush_fs_events(cx).await;
7566 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7567 .await;
7568 cx.executor().run_until_parked();
7569
7570 let repository = project.read_with(cx, |project, cx| {
7571 project
7572 .repositories(cx)
7573 .values()
7574 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
7575 .unwrap()
7576 .clone()
7577 });
7578
7579 repository.read_with(cx, |repository, _cx| {
7580 let entries = repository.cached_status().collect::<Vec<_>>();
7581
7582 // `sub` doesn't appear in our computed statuses.
7583 // a.txt appears with a combined `DA` status.
7584 assert_eq!(
7585 entries,
7586 [StatusEntry {
7587 repo_path: "a.txt".into(),
7588 status: TrackedStatus {
7589 index_status: StatusCode::Deleted,
7590 worktree_status: StatusCode::Added
7591 }
7592 .into(),
7593 }]
7594 )
7595 });
7596}
7597
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Open a worktree rooted at a subfolder of a repository and verify that
    // statuses for files inside that subfolder are still reported, keyed by
    // repo-relative paths.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the two files inside the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Only e.txt starts out with a status.
    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT.as_ref(), FileStatus::Untracked)],
    );

    // The worktree root is two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The repository's work directory is the repo root, not the
        // worktree root.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            FileStatus::Untracked
        );
    });

    // Clearing the repository's statuses should clear e.txt's status too.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(repository.status_for_path(&E_TXT.into()), None);
    });
}
7674
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE: the `#[cfg(any())]` below compiles this test out unconditionally;
// remove it to re-enable the test once it has been deflaked.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    // Cherry-pick a conflicting commit and verify that the conflicted path
    // appears in `merge_conflicts`, then is cleared once the conflict is
    // resolved and committed.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create two divergent edits of a.txt on separate branches, then
    // cherry-pick one onto the other to force a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is mid-cherry-pick with a conflict.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();
    // The conflicted path should now be reported by the repository.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After resolution, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
7755
7756#[gpui::test]
7757async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
7758 init_test(cx);
7759 let fs = FakeFs::new(cx.background_executor.clone());
7760 fs.insert_tree(
7761 path!("/root"),
7762 json!({
7763 ".git": {},
7764 ".gitignore": "*.txt\n",
7765 "a.xml": "<a></a>",
7766 "b.txt": "Some text"
7767 }),
7768 )
7769 .await;
7770
7771 fs.set_head_and_index_for_repo(
7772 path!("/root/.git").as_ref(),
7773 &[
7774 (".gitignore".into(), "*.txt\n".into()),
7775 ("a.xml".into(), "<a></a>".into()),
7776 ],
7777 );
7778
7779 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7780
7781 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7782 tree.flush_fs_events(cx).await;
7783 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7784 .await;
7785 cx.executor().run_until_parked();
7786
7787 let repository = project.read_with(cx, |project, cx| {
7788 project.repositories(cx).values().next().unwrap().clone()
7789 });
7790
7791 // One file is unmodified, the other is ignored.
7792 cx.read(|cx| {
7793 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
7794 assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
7795 });
7796
7797 // Change the gitignore, and stage the newly non-ignored file.
7798 fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
7799 .await
7800 .unwrap();
7801 fs.set_index_for_repo(
7802 Path::new(path!("/root/.git")),
7803 &[
7804 (".gitignore".into(), "*.txt\n".into()),
7805 ("a.xml".into(), "<a></a>".into()),
7806 ("b.txt".into(), "Some text".into()),
7807 ],
7808 );
7809
7810 cx.executor().run_until_parked();
7811 cx.read(|cx| {
7812 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
7813 assert_entry_git_state(
7814 tree.read(cx),
7815 repository.read(cx),
7816 "b.txt",
7817 Some(StatusCode::Added),
7818 false,
7819 );
7820 });
7821}
7822
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    // Renaming a repository's work directory on disk should update
    // `work_directory_abs_path` while preserving the file statuses.
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` is never tracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: work dir is project1; a is modified, b is untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename, and the statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
7902
7903// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
7904// you can't rename a directory which some program has already open. This is a
7905// limitation of the Windows. See:
7906// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
7907#[gpui::test]
7908#[cfg_attr(target_os = "windows", ignore)]
7909async fn test_file_status(cx: &mut gpui::TestAppContext) {
7910 init_test(cx);
7911 cx.executor().allow_parking();
7912 const IGNORE_RULE: &str = "**/target";
7913
7914 let root = TempTree::new(json!({
7915 "project": {
7916 "a.txt": "a",
7917 "b.txt": "bb",
7918 "c": {
7919 "d": {
7920 "e.txt": "eee"
7921 }
7922 },
7923 "f.txt": "ffff",
7924 "target": {
7925 "build_file": "???"
7926 },
7927 ".gitignore": IGNORE_RULE
7928 },
7929
7930 }));
7931 let root_path = root.path();
7932
7933 const A_TXT: &str = "a.txt";
7934 const B_TXT: &str = "b.txt";
7935 const E_TXT: &str = "c/d/e.txt";
7936 const F_TXT: &str = "f.txt";
7937 const DOTGITIGNORE: &str = ".gitignore";
7938 const BUILD_FILE: &str = "target/build_file";
7939
7940 // Set up git repository before creating the worktree.
7941 let work_dir = root.path().join("project");
7942 let mut repo = git_init(work_dir.as_path());
7943 repo.add_ignore_rule(IGNORE_RULE).unwrap();
7944 git_add(A_TXT, &repo);
7945 git_add(E_TXT, &repo);
7946 git_add(DOTGITIGNORE, &repo);
7947 git_commit("Initial commit", &repo);
7948
7949 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
7950
7951 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7952 tree.flush_fs_events(cx).await;
7953 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7954 .await;
7955 cx.executor().run_until_parked();
7956
7957 let repository = project.read_with(cx, |project, cx| {
7958 project.repositories(cx).values().next().unwrap().clone()
7959 });
7960
7961 // Check that the right git state is observed on startup
7962 repository.read_with(cx, |repository, _cx| {
7963 assert_eq!(
7964 repository.work_directory_abs_path.as_ref(),
7965 root_path.join("project").as_path()
7966 );
7967
7968 assert_eq!(
7969 repository.status_for_path(&B_TXT.into()).unwrap().status,
7970 FileStatus::Untracked,
7971 );
7972 assert_eq!(
7973 repository.status_for_path(&F_TXT.into()).unwrap().status,
7974 FileStatus::Untracked,
7975 );
7976 });
7977
7978 // Modify a file in the working copy.
7979 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
7980 tree.flush_fs_events(cx).await;
7981 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7982 .await;
7983 cx.executor().run_until_parked();
7984
7985 // The worktree detects that the file's git status has changed.
7986 repository.read_with(cx, |repository, _| {
7987 assert_eq!(
7988 repository.status_for_path(&A_TXT.into()).unwrap().status,
7989 StatusCode::Modified.worktree(),
7990 );
7991 });
7992
7993 // Create a commit in the git repository.
7994 git_add(A_TXT, &repo);
7995 git_add(B_TXT, &repo);
7996 git_commit("Committing modified and added", &repo);
7997 tree.flush_fs_events(cx).await;
7998 cx.executor().run_until_parked();
7999
8000 // The worktree detects that the files' git status have changed.
8001 repository.read_with(cx, |repository, _cx| {
8002 assert_eq!(
8003 repository.status_for_path(&F_TXT.into()).unwrap().status,
8004 FileStatus::Untracked,
8005 );
8006 assert_eq!(repository.status_for_path(&B_TXT.into()), None);
8007 assert_eq!(repository.status_for_path(&A_TXT.into()), None);
8008 });
8009
8010 // Modify files in the working copy and perform git operations on other files.
8011 git_reset(0, &repo);
8012 git_remove_index(Path::new(B_TXT), &repo);
8013 git_stash(&mut repo);
8014 std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
8015 std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
8016 tree.flush_fs_events(cx).await;
8017 cx.executor().run_until_parked();
8018
8019 // Check that more complex repo changes are tracked
8020 repository.read_with(cx, |repository, _cx| {
8021 assert_eq!(repository.status_for_path(&A_TXT.into()), None);
8022 assert_eq!(
8023 repository.status_for_path(&B_TXT.into()).unwrap().status,
8024 FileStatus::Untracked,
8025 );
8026 assert_eq!(
8027 repository.status_for_path(&E_TXT.into()).unwrap().status,
8028 StatusCode::Modified.worktree(),
8029 );
8030 });
8031
8032 std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
8033 std::fs::remove_dir_all(work_dir.join("c")).unwrap();
8034 std::fs::write(
8035 work_dir.join(DOTGITIGNORE),
8036 [IGNORE_RULE, "f.txt"].join("\n"),
8037 )
8038 .unwrap();
8039
8040 git_add(Path::new(DOTGITIGNORE), &repo);
8041 git_commit("Committing modified git ignore", &repo);
8042
8043 tree.flush_fs_events(cx).await;
8044 cx.executor().run_until_parked();
8045
8046 let mut renamed_dir_name = "first_directory/second_directory";
8047 const RENAMED_FILE: &str = "rf.txt";
8048
8049 std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
8050 std::fs::write(
8051 work_dir.join(renamed_dir_name).join(RENAMED_FILE),
8052 "new-contents",
8053 )
8054 .unwrap();
8055
8056 tree.flush_fs_events(cx).await;
8057 cx.executor().run_until_parked();
8058
8059 repository.read_with(cx, |repository, _cx| {
8060 assert_eq!(
8061 repository
8062 .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
8063 .unwrap()
8064 .status,
8065 FileStatus::Untracked,
8066 );
8067 });
8068
8069 renamed_dir_name = "new_first_directory/second_directory";
8070
8071 std::fs::rename(
8072 work_dir.join("first_directory"),
8073 work_dir.join("new_first_directory"),
8074 )
8075 .unwrap();
8076
8077 tree.flush_fs_events(cx).await;
8078 cx.executor().run_until_parked();
8079
8080 repository.read_with(cx, |repository, _cx| {
8081 assert_eq!(
8082 repository
8083 .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
8084 .unwrap()
8085 .status,
8086 FileStatus::Untracked,
8087 );
8088 });
8089}
8090
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that repositories are only discovered for *visible* worktrees:
    // adding an invisible (single-file) worktree must not cause additional
    // repositories (here, /root/dir1/.git) to appear in the project.
    init_test(cx);
    let fs = FakeFs::new(executor);
    // Two nested repositories: the outer one at dir1 and an inner one at dir1/dep1.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Only dep1 is opened as a (visible) worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    visible_worktree
        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;

    // Only dep1's repository should be known to the project.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Create an invisible worktree for a single file living inside the outer repository.
    let (invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    invisible_worktree
        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;

    // The set of repositories must be unchanged — the outer repo stays hidden.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8152
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies that newly created files get the correct git status and
    // is_ignored flag, both for tracked directories and for directories
    // ignored via .gitignore (at the repo root and in an ancestor directory).
    init_test(cx);
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                // Disable file-scan exclusions so ignored entries are still scanned.
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // The outer .gitignore lives *above* the repository root ("ancestor" rules).
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ignored directories are not scanned eagerly; force their entries to load.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        // Committed and unmodified: no index status, not ignored.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        // Matched by the *ancestor* .gitignore, which sits outside the repo,
        // so git itself does not consider the file ignored here.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        // Matched by the repository's own .gitignore.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files in each category and stage tracked-file2.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // Staged but not committed: shows up as Added in the index.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8287
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Verifies that repositories reached through a linked git worktree
    // (.git file containing "gitdir: ../.git/worktrees/...") and through a
    // submodule (.git file pointing into .git/modules/...) are both discovered,
    // and that git events in their (shared) git dirs refresh their status.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| {
        project
            .worktrees(cx)
            .next()
            .unwrap()
            .read(cx)
            .as_local()
            .unwrap()
            .scan_complete()
    });
    scan_complete.await;

    // All three repositories — main, linked worktree, submodule — are found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer inside the linked worktree must resolve to that worktree's
    // repository, not the outer /project repository.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        // Barrier guarantees pending repository updates are processed before we assert.
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
8446
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    // Two sibling worktrees (child1, child2) share the same containing git
    // repository; the project must report that repository exactly once.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children — but not their common parent — as worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // The shared parent repository is reported once, not once per worktree.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
8492
8493async fn search(
8494 project: &Entity<Project>,
8495 query: SearchQuery,
8496 cx: &mut gpui::TestAppContext,
8497) -> Result<HashMap<String, Vec<Range<usize>>>> {
8498 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8499 let mut results = HashMap::default();
8500 while let Ok(search_result) = search_rx.recv().await {
8501 match search_result {
8502 SearchResult::Buffer { buffer, ranges } => {
8503 results.entry(buffer).or_insert(ranges);
8504 }
8505 SearchResult::LimitReached => {}
8506 }
8507 }
8508 Ok(results
8509 .into_iter()
8510 .map(|(buffer, ranges)| {
8511 buffer.update(cx, |buffer, cx| {
8512 let path = buffer
8513 .file()
8514 .unwrap()
8515 .full_path(cx)
8516 .to_string_lossy()
8517 .to_string();
8518 let ranges = ranges
8519 .into_iter()
8520 .map(|range| range.to_offset(buffer))
8521 .collect::<Vec<_>>();
8522 (path, ranges)
8523 })
8524 })
8525 .collect())
8526}
8527
8528pub fn init_test(cx: &mut gpui::TestAppContext) {
8529 if std::env::var("RUST_LOG").is_ok() {
8530 env_logger::try_init().ok();
8531 }
8532
8533 cx.update(|cx| {
8534 let settings_store = SettingsStore::test(cx);
8535 cx.set_global(settings_store);
8536 release_channel::init(SemanticVersion::default(), cx);
8537 language::init(cx);
8538 Project::init_settings(cx);
8539 });
8540}
8541
8542fn json_lang() -> Arc<Language> {
8543 Arc::new(Language::new(
8544 LanguageConfig {
8545 name: "JSON".into(),
8546 matcher: LanguageMatcher {
8547 path_suffixes: vec!["json".to_string()],
8548 ..Default::default()
8549 },
8550 ..Default::default()
8551 },
8552 None,
8553 ))
8554}
8555
8556fn js_lang() -> Arc<Language> {
8557 Arc::new(Language::new(
8558 LanguageConfig {
8559 name: "JavaScript".into(),
8560 matcher: LanguageMatcher {
8561 path_suffixes: vec!["js".to_string()],
8562 ..Default::default()
8563 },
8564 ..Default::default()
8565 },
8566 None,
8567 ))
8568}
8569
8570fn rust_lang() -> Arc<Language> {
8571 Arc::new(Language::new(
8572 LanguageConfig {
8573 name: "Rust".into(),
8574 matcher: LanguageMatcher {
8575 path_suffixes: vec!["rs".to_string()],
8576 ..Default::default()
8577 },
8578 ..Default::default()
8579 },
8580 Some(tree_sitter_rust::LANGUAGE.into()),
8581 ))
8582}
8583
8584fn typescript_lang() -> Arc<Language> {
8585 Arc::new(Language::new(
8586 LanguageConfig {
8587 name: "TypeScript".into(),
8588 matcher: LanguageMatcher {
8589 path_suffixes: vec!["ts".to_string()],
8590 ..Default::default()
8591 },
8592 ..Default::default()
8593 },
8594 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8595 ))
8596}
8597
8598fn tsx_lang() -> Arc<Language> {
8599 Arc::new(Language::new(
8600 LanguageConfig {
8601 name: "tsx".into(),
8602 matcher: LanguageMatcher {
8603 path_suffixes: vec!["tsx".to_string()],
8604 ..Default::default()
8605 },
8606 ..Default::default()
8607 },
8608 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8609 ))
8610}
8611
8612fn get_all_tasks(
8613 project: &Entity<Project>,
8614 task_contexts: &TaskContexts,
8615 cx: &mut App,
8616) -> Vec<(TaskSourceKind, ResolvedTask)> {
8617 let (mut old, new) = project.update(cx, |project, cx| {
8618 project
8619 .task_store
8620 .read(cx)
8621 .task_inventory()
8622 .unwrap()
8623 .read(cx)
8624 .used_and_current_resolved_tasks(task_contexts, cx)
8625 });
8626 old.extend(new);
8627 old
8628}
8629
8630#[track_caller]
8631fn assert_entry_git_state(
8632 tree: &Worktree,
8633 repository: &Repository,
8634 path: &str,
8635 index_status: Option<StatusCode>,
8636 is_ignored: bool,
8637) {
8638 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
8639 let entry = tree
8640 .entry_for_path(path)
8641 .unwrap_or_else(|| panic!("entry {path} not found"));
8642 let status = repository
8643 .status_for_path(&path.into())
8644 .map(|entry| entry.status);
8645 let expected = index_status.map(|index_status| {
8646 TrackedStatus {
8647 index_status,
8648 worktree_status: StatusCode::Unmodified,
8649 }
8650 .into()
8651 });
8652 assert_eq!(
8653 status, expected,
8654 "expected {path} to have git status: {expected:?}"
8655 );
8656 assert_eq!(
8657 entry.is_ignored, is_ignored,
8658 "expected {path} to have is_ignored: {is_ignored}"
8659 );
8660}
8661
8662#[track_caller]
8663fn git_init(path: &Path) -> git2::Repository {
8664 let mut init_opts = RepositoryInitOptions::new();
8665 init_opts.initial_head("main");
8666 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
8667}
8668
8669#[track_caller]
8670fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
8671 let path = path.as_ref();
8672 let mut index = repo.index().expect("Failed to get index");
8673 index.add_path(path).expect("Failed to add file");
8674 index.write().expect("Failed to write index");
8675}
8676
8677#[track_caller]
8678fn git_remove_index(path: &Path, repo: &git2::Repository) {
8679 let mut index = repo.index().expect("Failed to get index");
8680 index.remove_path(path).expect("Failed to add file");
8681 index.write().expect("Failed to write index");
8682}
8683
8684#[track_caller]
8685fn git_commit(msg: &'static str, repo: &git2::Repository) {
8686 use git2::Signature;
8687
8688 let signature = Signature::now("test", "test@zed.dev").unwrap();
8689 let oid = repo.index().unwrap().write_tree().unwrap();
8690 let tree = repo.find_tree(oid).unwrap();
8691 if let Ok(head) = repo.head() {
8692 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
8693
8694 let parent_commit = parent_obj.as_commit().unwrap();
8695
8696 repo.commit(
8697 Some("HEAD"),
8698 &signature,
8699 &signature,
8700 msg,
8701 &tree,
8702 &[parent_commit],
8703 )
8704 .expect("Failed to commit with parent");
8705 } else {
8706 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
8707 .expect("Failed to commit");
8708 }
8709}
8710
// Applies `commit` onto the current HEAD, like `git cherry-pick`.
// NOTE: `#[cfg(any())]` never matches, so this helper is compiled out;
// it is kept around for ad-hoc use when writing new tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
8716
8717#[track_caller]
8718fn git_stash(repo: &mut git2::Repository) {
8719 use git2::Signature;
8720
8721 let signature = Signature::now("test", "test@zed.dev").unwrap();
8722 repo.stash_save(&signature, "N/A", None)
8723 .expect("Failed to stash");
8724}
8725
8726#[track_caller]
8727fn git_reset(offset: usize, repo: &git2::Repository) {
8728 let head = repo.head().expect("Couldn't get repo head");
8729 let object = head.peel(git2::ObjectType::Commit).unwrap();
8730 let commit = object.as_commit().unwrap();
8731 let new_head = commit
8732 .parents()
8733 .inspect(|parnet| {
8734 parnet.message();
8735 })
8736 .nth(offset)
8737 .expect("Not enough history");
8738 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
8739 .expect("Could not reset");
8740}
8741
// NOTE: `#[cfg(any())]` never matches, so this helper is compiled out;
// it is kept around for ad-hoc use when writing new tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    // Create a branch at the current HEAD commit without switching to it.
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-paste: the old expect message said "Failed to commit".
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
8752
// Points HEAD at `name` and syncs the working tree to match, like
// `git checkout <name>`.
// NOTE: `#[cfg(any())]` never matches, so this helper is compiled out;
// it is kept around for ad-hoc use when writing new tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
8759
// Snapshot of `git status`: maps each reported path to its status flags.
// NOTE: `#[cfg(any())]` never matches, so this helper is compiled out;
// it is kept around for ad-hoc use when writing new tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}
8769
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Open both directories as separate worktrees of a single project.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    for worktree in project.read_with(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>()) {
        worktree
            .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
            .await;
    }
    cx.run_until_parked();

    // Capture each worktree's absolute root path and id for the assertions below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // An absolute path inside a worktree resolves to that worktree plus
        // the worktree-relative path.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // Resolution is purely path-based: the file doesn't have to exist.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}