1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, lsp_store::FS_WATCH_DEBOUNCE_TIMEOUT,
5 task_inventory::TaskContexts, task_store::TaskSettingsLocation, *,
6};
7use buffer_diff::{
8 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
9 DiffHunkStatusKind, assert_hunks,
10};
11use fs::FakeFs;
12use futures::{StreamExt, future};
13use git::{
14 repository::RepoPath,
15 status::{StatusCode, TrackedStatus},
16};
17use git2::RepositoryInitOptions;
18use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
19use http_client::Url;
20use language::{
21 Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
22 LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
23 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
24 tree_sitter_rust, tree_sitter_typescript,
25};
26use lsp::{
27 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
28 WillRenameFiles, notification::DidRenameFiles,
29};
30use parking_lot::Mutex;
31use paths::{config_dir, tasks_file};
32use postage::stream::Stream as _;
33use pretty_assertions::{assert_eq, assert_matches};
34use rand::{Rng as _, rngs::StdRng};
35use serde_json::json;
36#[cfg(not(windows))]
37use std::os;
38use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
39use task::{ResolvedTask, TaskContext};
40use unindent::Unindent as _;
41use util::{
42 TryFutureExt as _, assert_set_eq, maybe, path,
43 paths::PathMatcher,
44 separator,
45 test::{TempTree, marked_text_offsets},
46 uri,
47};
48use worktree::WorktreeModelHandle as _;
49
50#[gpui::test]
51async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
52 cx.executor().allow_parking();
53
54 let (tx, mut rx) = futures::channel::mpsc::unbounded();
55 let _thread = std::thread::spawn(move || {
56 #[cfg(not(target_os = "windows"))]
57 std::fs::metadata("/tmp").unwrap();
58 #[cfg(target_os = "windows")]
59 std::fs::metadata("C:/Windows").unwrap();
60 std::thread::sleep(Duration::from_millis(1000));
61 tx.unbounded_send(1).unwrap();
62 });
63 rx.next().await.unwrap();
64}
65
66#[gpui::test]
67async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
68 cx.executor().allow_parking();
69
70 let io_task = smol::unblock(move || {
71 println!("sleeping on thread {:?}", std::thread::current().id());
72 std::thread::sleep(Duration::from_millis(10));
73 1
74 });
75
76 let task = cx.foreground_executor().spawn(async move {
77 io_task.await;
78 });
79
80 task.await;
81}
82
83#[cfg(not(windows))]
84#[gpui::test]
85async fn test_symlinks(cx: &mut gpui::TestAppContext) {
86 init_test(cx);
87 cx.executor().allow_parking();
88
89 let dir = TempTree::new(json!({
90 "root": {
91 "apple": "",
92 "banana": {
93 "carrot": {
94 "date": "",
95 "endive": "",
96 }
97 },
98 "fennel": {
99 "grape": "",
100 }
101 }
102 }));
103
104 let root_link_path = dir.path().join("root_link");
105 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
106 os::unix::fs::symlink(
107 dir.path().join("root/fennel"),
108 dir.path().join("root/finnochio"),
109 )
110 .unwrap();
111
112 let project = Project::test(
113 Arc::new(RealFs::new(None, cx.executor())),
114 [root_link_path.as_ref()],
115 cx,
116 )
117 .await;
118
119 project.update(cx, |project, cx| {
120 let tree = project.worktrees(cx).next().unwrap().read(cx);
121 assert_eq!(tree.file_count(), 5);
122 assert_eq!(
123 tree.inode_for_path("fennel/grape"),
124 tree.inode_for_path("finnochio/grape")
125 );
126 });
127}
128
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture: a root .editorconfig, a .zed/settings.json that it should
    // override, and a nested b/.editorconfig that overrides the root one.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a file in the worktree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so it keeps the tab_size from .zed/settings.json.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
218
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Fixture: a `.zed` directory at the worktree root and a nested `b/.zed`,
    // each carrying its own settings.json and tasks.json.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // Source kind of the tasks defined in the root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings cascade: a file under `a/` sees the root settings,
            // while a file under `b/` sees the nested `b/.zed` override.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree-local tasks are listed; the nested `b/.zed` task first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as recently scheduled and install a global tasks.json.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled root task now sorts first, followed by the
    // nested worktree task and the newly added global task.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
423
424#[gpui::test]
425async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
426 init_test(cx);
427 TaskStore::init(None);
428
429 let fs = FakeFs::new(cx.executor());
430 fs.insert_tree(
431 path!("/dir"),
432 json!({
433 ".zed": {
434 "tasks.json": r#"[{
435 "label": "test worktree root",
436 "command": "echo $ZED_WORKTREE_ROOT"
437 }]"#,
438 },
439 "a": {
440 "a.rs": "fn a() {\n A\n}"
441 },
442 }),
443 )
444 .await;
445
446 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
447 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
448
449 cx.executor().run_until_parked();
450 let worktree_id = cx.update(|cx| {
451 project.update(cx, |project, cx| {
452 project.worktrees(cx).next().unwrap().read(cx).id()
453 })
454 });
455
456 let active_non_worktree_item_tasks = cx.update(|cx| {
457 get_all_tasks(
458 &project,
459 &TaskContexts {
460 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
461 active_worktree_context: None,
462 other_worktree_contexts: Vec::new(),
463 lsp_task_sources: HashMap::default(),
464 latest_selection: None,
465 },
466 cx,
467 )
468 });
469 assert!(
470 active_non_worktree_item_tasks.is_empty(),
471 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
472 );
473
474 let active_worktree_tasks = cx.update(|cx| {
475 get_all_tasks(
476 &project,
477 &TaskContexts {
478 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
479 active_worktree_context: Some((worktree_id, {
480 let mut worktree_context = TaskContext::default();
481 worktree_context
482 .task_variables
483 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
484 worktree_context
485 })),
486 other_worktree_contexts: Vec::new(),
487 lsp_task_sources: HashMap::default(),
488 latest_selection: None,
489 },
490 cx,
491 )
492 });
493 assert_eq!(
494 active_worktree_tasks
495 .into_iter()
496 .map(|(source_kind, task)| {
497 let resolved = task.resolved;
498 (source_kind, resolved.command)
499 })
500 .collect::<Vec<_>>(),
501 vec![(
502 TaskSourceKind::Worktree {
503 id: worktree_id,
504 directory_in_worktree: PathBuf::from(separator!(".zed")),
505 id_base: if cfg!(windows) {
506 "local worktree tasks from directory \".zed\"".into()
507 } else {
508 "local worktree tasks from directory \".zed\"".into()
509 },
510 },
511 "echo /dir".to_string(),
512 )]
513 );
514}
515
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake language servers with distinct completion trigger characters,
    // so we can tell which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    // The old document is closed and the new one opened on the same server.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed one diagnostic so we can verify it is cleared when the buffer
    // changes language below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two DidOpen notifications is not guaranteed).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
917
918#[gpui::test]
919async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
920 init_test(cx);
921
922 let fs = FakeFs::new(cx.executor());
923 fs.insert_tree(
924 path!("/the-root"),
925 json!({
926 ".gitignore": "target\n",
927 "Cargo.lock": "",
928 "src": {
929 "a.rs": "",
930 "b.rs": "",
931 },
932 "target": {
933 "x": {
934 "out": {
935 "x.rs": ""
936 }
937 },
938 "y": {
939 "out": {
940 "y.rs": "",
941 }
942 },
943 "z": {
944 "out": {
945 "z.rs": ""
946 }
947 }
948 }
949 }),
950 )
951 .await;
952 fs.insert_tree(
953 path!("/the-registry"),
954 json!({
955 "dep1": {
956 "src": {
957 "dep1.rs": "",
958 }
959 },
960 "dep2": {
961 "src": {
962 "dep2.rs": "",
963 }
964 },
965 }),
966 )
967 .await;
968 fs.insert_tree(
969 path!("/the/stdlib"),
970 json!({
971 "LICENSE": "",
972 "src": {
973 "string.rs": "",
974 }
975 }),
976 )
977 .await;
978
979 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
980 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
981 (project.languages().clone(), project.lsp_store())
982 });
983 language_registry.add(rust_lang());
984 let mut fake_servers = language_registry.register_fake_lsp(
985 "Rust",
986 FakeLspAdapter {
987 name: "the-language-server",
988 ..Default::default()
989 },
990 );
991
992 cx.executor().run_until_parked();
993
994 // Start the language server by opening a buffer with a compatible file extension.
995 project
996 .update(cx, |project, cx| {
997 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
998 })
999 .await
1000 .unwrap();
1001
1002 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1003 project.update(cx, |project, cx| {
1004 let worktree = project.worktrees(cx).next().unwrap();
1005 assert_eq!(
1006 worktree
1007 .read(cx)
1008 .snapshot()
1009 .entries(true, 0)
1010 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1011 .collect::<Vec<_>>(),
1012 &[
1013 (Path::new(""), false),
1014 (Path::new(".gitignore"), false),
1015 (Path::new("Cargo.lock"), false),
1016 (Path::new("src"), false),
1017 (Path::new("src/a.rs"), false),
1018 (Path::new("src/b.rs"), false),
1019 (Path::new("target"), true),
1020 ]
1021 );
1022 });
1023
1024 let prev_read_dir_count = fs.read_dir_call_count();
1025
1026 let fake_server = fake_servers.next().await.unwrap();
1027 let (server_id, server_name) = lsp_store.read_with(cx, |lsp_store, _| {
1028 let (id, status) = lsp_store.language_server_statuses().next().unwrap();
1029 (id, LanguageServerName::from(status.name.as_str()))
1030 });
1031
1032 // Simulate jumping to a definition in a dependency outside of the worktree.
1033 let _out_of_worktree_buffer = project
1034 .update(cx, |project, cx| {
1035 project.open_local_buffer_via_lsp(
1036 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1037 server_id,
1038 server_name.clone(),
1039 cx,
1040 )
1041 })
1042 .await
1043 .unwrap();
1044
1045 // Keep track of the FS events reported to the language server.
1046 let file_changes = Arc::new(Mutex::new(Vec::new()));
1047 fake_server
1048 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1049 registrations: vec![lsp::Registration {
1050 id: Default::default(),
1051 method: "workspace/didChangeWatchedFiles".to_string(),
1052 register_options: serde_json::to_value(
1053 lsp::DidChangeWatchedFilesRegistrationOptions {
1054 watchers: vec![
1055 lsp::FileSystemWatcher {
1056 glob_pattern: lsp::GlobPattern::String(
1057 path!("/the-root/Cargo.toml").to_string(),
1058 ),
1059 kind: None,
1060 },
1061 lsp::FileSystemWatcher {
1062 glob_pattern: lsp::GlobPattern::String(
1063 path!("/the-root/src/*.{rs,c}").to_string(),
1064 ),
1065 kind: None,
1066 },
1067 lsp::FileSystemWatcher {
1068 glob_pattern: lsp::GlobPattern::String(
1069 path!("/the-root/target/y/**/*.rs").to_string(),
1070 ),
1071 kind: None,
1072 },
1073 lsp::FileSystemWatcher {
1074 glob_pattern: lsp::GlobPattern::String(
1075 path!("/the/stdlib/src/**/*.rs").to_string(),
1076 ),
1077 kind: None,
1078 },
1079 lsp::FileSystemWatcher {
1080 glob_pattern: lsp::GlobPattern::String(
1081 path!("**/Cargo.lock").to_string(),
1082 ),
1083 kind: None,
1084 },
1085 ],
1086 },
1087 )
1088 .ok(),
1089 }],
1090 })
1091 .await
1092 .into_response()
1093 .unwrap();
1094 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1095 let file_changes = file_changes.clone();
1096 move |params, _| {
1097 let mut file_changes = file_changes.lock();
1098 file_changes.extend(params.changes);
1099 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1100 }
1101 });
1102
1103 cx.executor().run_until_parked();
1104 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1105 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1106
1107 let mut new_watched_paths = fs.watched_paths();
1108 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1109 assert_eq!(
1110 &new_watched_paths,
1111 &[
1112 Path::new(path!("/the-root")),
1113 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1114 Path::new(path!("/the/stdlib/src"))
1115 ]
1116 );
1117
1118 // Now the language server has asked us to watch an ignored directory path,
1119 // so we recursively load it.
1120 project.update(cx, |project, cx| {
1121 let worktree = project.visible_worktrees(cx).next().unwrap();
1122 assert_eq!(
1123 worktree
1124 .read(cx)
1125 .snapshot()
1126 .entries(true, 0)
1127 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1128 .collect::<Vec<_>>(),
1129 &[
1130 (Path::new(""), false),
1131 (Path::new(".gitignore"), false),
1132 (Path::new("Cargo.lock"), false),
1133 (Path::new("src"), false),
1134 (Path::new("src/a.rs"), false),
1135 (Path::new("src/b.rs"), false),
1136 (Path::new("target"), true),
1137 (Path::new("target/x"), true),
1138 (Path::new("target/y"), true),
1139 (Path::new("target/y/out"), true),
1140 (Path::new("target/y/out/y.rs"), true),
1141 (Path::new("target/z"), true),
1142 ]
1143 );
1144 });
1145
1146 // Perform some file system mutations, two of which match the watched patterns,
1147 // and one of which does not.
1148 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1149 .await
1150 .unwrap();
1151 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1152 .await
1153 .unwrap();
1154 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1155 .await
1156 .unwrap();
1157 fs.create_file(
1158 path!("/the-root/target/x/out/x2.rs").as_ref(),
1159 Default::default(),
1160 )
1161 .await
1162 .unwrap();
1163 fs.create_file(
1164 path!("/the-root/target/y/out/y2.rs").as_ref(),
1165 Default::default(),
1166 )
1167 .await
1168 .unwrap();
1169 fs.save(
1170 path!("/the-root/Cargo.lock").as_ref(),
1171 &"".into(),
1172 Default::default(),
1173 )
1174 .await
1175 .unwrap();
1176 fs.save(
1177 path!("/the-stdlib/LICENSE").as_ref(),
1178 &"".into(),
1179 Default::default(),
1180 )
1181 .await
1182 .unwrap();
1183 fs.save(
1184 path!("/the/stdlib/src/string.rs").as_ref(),
1185 &"".into(),
1186 Default::default(),
1187 )
1188 .await
1189 .unwrap();
1190
1191 // The language server receives events for the FS mutations that match its watch patterns.
1192 cx.executor().run_until_parked();
1193 cx.executor().advance_clock(FS_WATCH_DEBOUNCE_TIMEOUT);
1194 cx.executor().run_until_parked();
1195
1196 assert_eq!(
1197 &*file_changes.lock(),
1198 &[
1199 lsp::FileEvent {
1200 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1201 typ: lsp::FileChangeType::CHANGED,
1202 },
1203 lsp::FileEvent {
1204 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1205 typ: lsp::FileChangeType::DELETED,
1206 },
1207 lsp::FileEvent {
1208 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1209 typ: lsp::FileChangeType::CREATED,
1210 },
1211 lsp::FileEvent {
1212 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1213 typ: lsp::FileChangeType::CREATED,
1214 },
1215 lsp::FileEvent {
1216 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1217 typ: lsp::FileChangeType::CHANGED,
1218 },
1219 ]
1220 );
1221}
1222
1223#[gpui::test]
1224async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1225 init_test(cx);
1226
1227 let fs = FakeFs::new(cx.executor());
1228 fs.insert_tree(
1229 path!("/dir"),
1230 json!({
1231 "a.rs": "let a = 1;",
1232 "b.rs": "let b = 2;"
1233 }),
1234 )
1235 .await;
1236
1237 let project = Project::test(
1238 fs,
1239 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1240 cx,
1241 )
1242 .await;
1243 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1244
1245 let buffer_a = project
1246 .update(cx, |project, cx| {
1247 project.open_local_buffer(path!("/dir/a.rs"), cx)
1248 })
1249 .await
1250 .unwrap();
1251 let buffer_b = project
1252 .update(cx, |project, cx| {
1253 project.open_local_buffer(path!("/dir/b.rs"), cx)
1254 })
1255 .await
1256 .unwrap();
1257
1258 lsp_store.update(cx, |lsp_store, cx| {
1259 lsp_store
1260 .update_diagnostics(
1261 LanguageServerId(0),
1262 lsp::PublishDiagnosticsParams {
1263 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1264 version: None,
1265 diagnostics: vec![lsp::Diagnostic {
1266 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1267 severity: Some(lsp::DiagnosticSeverity::ERROR),
1268 message: "error 1".to_string(),
1269 ..Default::default()
1270 }],
1271 },
1272 &[],
1273 cx,
1274 )
1275 .unwrap();
1276 lsp_store
1277 .update_diagnostics(
1278 LanguageServerId(0),
1279 lsp::PublishDiagnosticsParams {
1280 uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
1281 version: None,
1282 diagnostics: vec![lsp::Diagnostic {
1283 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1284 severity: Some(DiagnosticSeverity::WARNING),
1285 message: "error 2".to_string(),
1286 ..Default::default()
1287 }],
1288 },
1289 &[],
1290 cx,
1291 )
1292 .unwrap();
1293 });
1294
1295 buffer_a.update(cx, |buffer, _| {
1296 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1297 assert_eq!(
1298 chunks
1299 .iter()
1300 .map(|(s, d)| (s.as_str(), *d))
1301 .collect::<Vec<_>>(),
1302 &[
1303 ("let ", None),
1304 ("a", Some(DiagnosticSeverity::ERROR)),
1305 (" = 1;", None),
1306 ]
1307 );
1308 });
1309 buffer_b.update(cx, |buffer, _| {
1310 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1311 assert_eq!(
1312 chunks
1313 .iter()
1314 .map(|(s, d)| (s.as_str(), *d))
1315 .collect::<Vec<_>>(),
1316 &[
1317 ("let ", None),
1318 ("b", Some(DiagnosticSeverity::WARNING)),
1319 (" = 2;", None),
1320 ]
1321 );
1322 });
1323}
1324
1325#[gpui::test]
1326async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1327 init_test(cx);
1328
1329 let fs = FakeFs::new(cx.executor());
1330 fs.insert_tree(
1331 path!("/root"),
1332 json!({
1333 "dir": {
1334 ".git": {
1335 "HEAD": "ref: refs/heads/main",
1336 },
1337 ".gitignore": "b.rs",
1338 "a.rs": "let a = 1;",
1339 "b.rs": "let b = 2;",
1340 },
1341 "other.rs": "let b = c;"
1342 }),
1343 )
1344 .await;
1345
1346 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1347 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1348 let (worktree, _) = project
1349 .update(cx, |project, cx| {
1350 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1351 })
1352 .await
1353 .unwrap();
1354 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1355
1356 let (worktree, _) = project
1357 .update(cx, |project, cx| {
1358 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1359 })
1360 .await
1361 .unwrap();
1362 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1363
1364 let server_id = LanguageServerId(0);
1365 lsp_store.update(cx, |lsp_store, cx| {
1366 lsp_store
1367 .update_diagnostics(
1368 server_id,
1369 lsp::PublishDiagnosticsParams {
1370 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1371 version: None,
1372 diagnostics: vec![lsp::Diagnostic {
1373 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1374 severity: Some(lsp::DiagnosticSeverity::ERROR),
1375 message: "unused variable 'b'".to_string(),
1376 ..Default::default()
1377 }],
1378 },
1379 &[],
1380 cx,
1381 )
1382 .unwrap();
1383 lsp_store
1384 .update_diagnostics(
1385 server_id,
1386 lsp::PublishDiagnosticsParams {
1387 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1388 version: None,
1389 diagnostics: vec![lsp::Diagnostic {
1390 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1391 severity: Some(lsp::DiagnosticSeverity::ERROR),
1392 message: "unknown variable 'c'".to_string(),
1393 ..Default::default()
1394 }],
1395 },
1396 &[],
1397 cx,
1398 )
1399 .unwrap();
1400 });
1401
1402 let main_ignored_buffer = project
1403 .update(cx, |project, cx| {
1404 project.open_buffer((main_worktree_id, "b.rs"), cx)
1405 })
1406 .await
1407 .unwrap();
1408 main_ignored_buffer.update(cx, |buffer, _| {
1409 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1410 assert_eq!(
1411 chunks
1412 .iter()
1413 .map(|(s, d)| (s.as_str(), *d))
1414 .collect::<Vec<_>>(),
1415 &[
1416 ("let ", None),
1417 ("b", Some(DiagnosticSeverity::ERROR)),
1418 (" = 2;", None),
1419 ],
1420 "Gigitnored buffers should still get in-buffer diagnostics",
1421 );
1422 });
1423 let other_buffer = project
1424 .update(cx, |project, cx| {
1425 project.open_buffer((other_worktree_id, ""), cx)
1426 })
1427 .await
1428 .unwrap();
1429 other_buffer.update(cx, |buffer, _| {
1430 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1431 assert_eq!(
1432 chunks
1433 .iter()
1434 .map(|(s, d)| (s.as_str(), *d))
1435 .collect::<Vec<_>>(),
1436 &[
1437 ("let b = ", None),
1438 ("c", Some(DiagnosticSeverity::ERROR)),
1439 (";", None),
1440 ],
1441 "Buffers from hidden projects should still get in-buffer diagnostics"
1442 );
1443 });
1444
1445 project.update(cx, |project, cx| {
1446 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1447 assert_eq!(
1448 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1449 vec![(
1450 ProjectPath {
1451 worktree_id: main_worktree_id,
1452 path: Arc::from(Path::new("b.rs")),
1453 },
1454 server_id,
1455 DiagnosticSummary {
1456 error_count: 1,
1457 warning_count: 0,
1458 }
1459 )]
1460 );
1461 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1462 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1463 });
1464}
1465
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies that work-done progress reported under the adapter's
    // `disk_based_diagnostics_progress_token` is translated into
    // DiskBasedDiagnosticsStarted/Finished project events, and that
    // re-publishing identical (empty) diagnostics does not emit a second
    // DiagnosticsUpdated event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // A token that begins with the configured progress token (here with a
    // "/0" suffix appended) still counts as disk-based-diagnostics work.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic while progress is running produces a
    // DiagnosticsUpdated event for the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the suffixed progress token emits the Finished event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is present in the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The identical (still empty) publish must not produce a further event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1601
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server while its disk-based
    // diagnostics progress is still open does not leave the project stuck
    // in a "diagnostics running" state: only the replacement server's
    // progress lifecycle is tracked afterwards.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The replacement server gets a fresh id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1688
1689#[gpui::test]
1690async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1691 init_test(cx);
1692
1693 let fs = FakeFs::new(cx.executor());
1694 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
1695
1696 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1697
1698 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1699 language_registry.add(rust_lang());
1700 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1701
1702 let (buffer, _) = project
1703 .update(cx, |project, cx| {
1704 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1705 })
1706 .await
1707 .unwrap();
1708
1709 // Publish diagnostics
1710 let fake_server = fake_servers.next().await.unwrap();
1711 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1712 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1713 version: None,
1714 diagnostics: vec![lsp::Diagnostic {
1715 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1716 severity: Some(lsp::DiagnosticSeverity::ERROR),
1717 message: "the message".to_string(),
1718 ..Default::default()
1719 }],
1720 });
1721
1722 cx.executor().run_until_parked();
1723 buffer.update(cx, |buffer, _| {
1724 assert_eq!(
1725 buffer
1726 .snapshot()
1727 .diagnostics_in_range::<_, usize>(0..1, false)
1728 .map(|entry| entry.diagnostic.message.clone())
1729 .collect::<Vec<_>>(),
1730 ["the message".to_string()]
1731 );
1732 });
1733 project.update(cx, |project, cx| {
1734 assert_eq!(
1735 project.diagnostic_summary(false, cx),
1736 DiagnosticSummary {
1737 error_count: 1,
1738 warning_count: 0,
1739 }
1740 );
1741 });
1742
1743 project.update(cx, |project, cx| {
1744 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1745 });
1746
1747 // The diagnostics are cleared.
1748 cx.executor().run_until_parked();
1749 buffer.update(cx, |buffer, _| {
1750 assert_eq!(
1751 buffer
1752 .snapshot()
1753 .diagnostics_in_range::<_, usize>(0..1, false)
1754 .map(|entry| entry.diagnostic.message.clone())
1755 .collect::<Vec<_>>(),
1756 Vec::<String>::new(),
1757 );
1758 });
1759 project.update(cx, |project, cx| {
1760 assert_eq!(
1761 project.diagnostic_summary(false, cx),
1762 DiagnosticSummary {
1763 error_count: 0,
1764 warning_count: 0,
1765 }
1766 );
1767 });
1768}
1769
1770#[gpui::test]
1771async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1772 init_test(cx);
1773
1774 let fs = FakeFs::new(cx.executor());
1775 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1776
1777 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1778 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1779
1780 language_registry.add(rust_lang());
1781 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1782
1783 let (buffer, _handle) = project
1784 .update(cx, |project, cx| {
1785 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1786 })
1787 .await
1788 .unwrap();
1789
1790 // Before restarting the server, report diagnostics with an unknown buffer version.
1791 let fake_server = fake_servers.next().await.unwrap();
1792 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1793 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1794 version: Some(10000),
1795 diagnostics: Vec::new(),
1796 });
1797 cx.executor().run_until_parked();
1798 project.update(cx, |project, cx| {
1799 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1800 });
1801
1802 let mut fake_server = fake_servers.next().await.unwrap();
1803 let notification = fake_server
1804 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1805 .await
1806 .text_document;
1807 assert_eq!(notification.version, 0);
1808}
1809
1810#[gpui::test]
1811async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1812 init_test(cx);
1813
1814 let progress_token = "the-progress-token";
1815
1816 let fs = FakeFs::new(cx.executor());
1817 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1818
1819 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1820
1821 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1822 language_registry.add(rust_lang());
1823 let mut fake_servers = language_registry.register_fake_lsp(
1824 "Rust",
1825 FakeLspAdapter {
1826 name: "the-language-server",
1827 disk_based_diagnostics_sources: vec!["disk".into()],
1828 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1829 ..Default::default()
1830 },
1831 );
1832
1833 let (buffer, _handle) = project
1834 .update(cx, |project, cx| {
1835 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1836 })
1837 .await
1838 .unwrap();
1839
1840 // Simulate diagnostics starting to update.
1841 let mut fake_server = fake_servers.next().await.unwrap();
1842 fake_server
1843 .start_progress_with(
1844 "another-token",
1845 lsp::WorkDoneProgressBegin {
1846 cancellable: Some(false),
1847 ..Default::default()
1848 },
1849 )
1850 .await;
1851 fake_server
1852 .start_progress_with(
1853 progress_token,
1854 lsp::WorkDoneProgressBegin {
1855 cancellable: Some(true),
1856 ..Default::default()
1857 },
1858 )
1859 .await;
1860 cx.executor().run_until_parked();
1861
1862 project.update(cx, |project, cx| {
1863 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1864 });
1865
1866 let cancel_notification = fake_server
1867 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1868 .await;
1869 assert_eq!(
1870 cancel_notification.token,
1871 NumberOrString::String(progress_token.into())
1872 );
1873}
1874
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Verifies that flipping the per-language `enable_language_server`
    // setting stops and restarts only the server for that language,
    // leaving other languages' servers untouched.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // One fake server per language so stops/starts can be observed
    // independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    // Each server receives a didOpen for its own language's file.
    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server is told to exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance starts up and reopens the Rust buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JavaScript server is told to exit.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1992
1993#[gpui::test(iterations = 3)]
1994async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1995 init_test(cx);
1996
1997 let text = "
1998 fn a() { A }
1999 fn b() { BB }
2000 fn c() { CCC }
2001 "
2002 .unindent();
2003
2004 let fs = FakeFs::new(cx.executor());
2005 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2006
2007 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2008 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2009
2010 language_registry.add(rust_lang());
2011 let mut fake_servers = language_registry.register_fake_lsp(
2012 "Rust",
2013 FakeLspAdapter {
2014 disk_based_diagnostics_sources: vec!["disk".into()],
2015 ..Default::default()
2016 },
2017 );
2018
2019 let buffer = project
2020 .update(cx, |project, cx| {
2021 project.open_local_buffer(path!("/dir/a.rs"), cx)
2022 })
2023 .await
2024 .unwrap();
2025
2026 let _handle = project.update(cx, |project, cx| {
2027 project.register_buffer_with_language_servers(&buffer, cx)
2028 });
2029
2030 let mut fake_server = fake_servers.next().await.unwrap();
2031 let open_notification = fake_server
2032 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2033 .await;
2034
2035 // Edit the buffer, moving the content down
2036 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2037 let change_notification_1 = fake_server
2038 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2039 .await;
2040 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2041
2042 // Report some diagnostics for the initial version of the buffer
2043 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2044 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2045 version: Some(open_notification.text_document.version),
2046 diagnostics: vec![
2047 lsp::Diagnostic {
2048 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2049 severity: Some(DiagnosticSeverity::ERROR),
2050 message: "undefined variable 'A'".to_string(),
2051 source: Some("disk".to_string()),
2052 ..Default::default()
2053 },
2054 lsp::Diagnostic {
2055 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2056 severity: Some(DiagnosticSeverity::ERROR),
2057 message: "undefined variable 'BB'".to_string(),
2058 source: Some("disk".to_string()),
2059 ..Default::default()
2060 },
2061 lsp::Diagnostic {
2062 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2063 severity: Some(DiagnosticSeverity::ERROR),
2064 source: Some("disk".to_string()),
2065 message: "undefined variable 'CCC'".to_string(),
2066 ..Default::default()
2067 },
2068 ],
2069 });
2070
2071 // The diagnostics have moved down since they were created.
2072 cx.executor().run_until_parked();
2073 buffer.update(cx, |buffer, _| {
2074 assert_eq!(
2075 buffer
2076 .snapshot()
2077 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2078 .collect::<Vec<_>>(),
2079 &[
2080 DiagnosticEntry {
2081 range: Point::new(3, 9)..Point::new(3, 11),
2082 diagnostic: Diagnostic {
2083 source: Some("disk".into()),
2084 severity: DiagnosticSeverity::ERROR,
2085 message: "undefined variable 'BB'".to_string(),
2086 is_disk_based: true,
2087 group_id: 1,
2088 is_primary: true,
2089 ..Default::default()
2090 },
2091 },
2092 DiagnosticEntry {
2093 range: Point::new(4, 9)..Point::new(4, 12),
2094 diagnostic: Diagnostic {
2095 source: Some("disk".into()),
2096 severity: DiagnosticSeverity::ERROR,
2097 message: "undefined variable 'CCC'".to_string(),
2098 is_disk_based: true,
2099 group_id: 2,
2100 is_primary: true,
2101 ..Default::default()
2102 }
2103 }
2104 ]
2105 );
2106 assert_eq!(
2107 chunks_with_diagnostics(buffer, 0..buffer.len()),
2108 [
2109 ("\n\nfn a() { ".to_string(), None),
2110 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2111 (" }\nfn b() { ".to_string(), None),
2112 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2113 (" }\nfn c() { ".to_string(), None),
2114 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2115 (" }\n".to_string(), None),
2116 ]
2117 );
2118 assert_eq!(
2119 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2120 [
2121 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2122 (" }\nfn c() { ".to_string(), None),
2123 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2124 ]
2125 );
2126 });
2127
2128 // Ensure overlapping diagnostics are highlighted correctly.
2129 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2130 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2131 version: Some(open_notification.text_document.version),
2132 diagnostics: vec![
2133 lsp::Diagnostic {
2134 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2135 severity: Some(DiagnosticSeverity::ERROR),
2136 message: "undefined variable 'A'".to_string(),
2137 source: Some("disk".to_string()),
2138 ..Default::default()
2139 },
2140 lsp::Diagnostic {
2141 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2142 severity: Some(DiagnosticSeverity::WARNING),
2143 message: "unreachable statement".to_string(),
2144 source: Some("disk".to_string()),
2145 ..Default::default()
2146 },
2147 ],
2148 });
2149
2150 cx.executor().run_until_parked();
2151 buffer.update(cx, |buffer, _| {
2152 assert_eq!(
2153 buffer
2154 .snapshot()
2155 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2156 .collect::<Vec<_>>(),
2157 &[
2158 DiagnosticEntry {
2159 range: Point::new(2, 9)..Point::new(2, 12),
2160 diagnostic: Diagnostic {
2161 source: Some("disk".into()),
2162 severity: DiagnosticSeverity::WARNING,
2163 message: "unreachable statement".to_string(),
2164 is_disk_based: true,
2165 group_id: 4,
2166 is_primary: true,
2167 ..Default::default()
2168 }
2169 },
2170 DiagnosticEntry {
2171 range: Point::new(2, 9)..Point::new(2, 10),
2172 diagnostic: Diagnostic {
2173 source: Some("disk".into()),
2174 severity: DiagnosticSeverity::ERROR,
2175 message: "undefined variable 'A'".to_string(),
2176 is_disk_based: true,
2177 group_id: 3,
2178 is_primary: true,
2179 ..Default::default()
2180 },
2181 }
2182 ]
2183 );
2184 assert_eq!(
2185 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2186 [
2187 ("fn a() { ".to_string(), None),
2188 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2189 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2190 ("\n".to_string(), None),
2191 ]
2192 );
2193 assert_eq!(
2194 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2195 [
2196 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2197 ("\n".to_string(), None),
2198 ]
2199 );
2200 });
2201
2202 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2203 // changes since the last save.
2204 buffer.update(cx, |buffer, cx| {
2205 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2206 buffer.edit(
2207 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2208 None,
2209 cx,
2210 );
2211 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2212 });
2213 let change_notification_2 = fake_server
2214 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2215 .await;
2216 assert!(
2217 change_notification_2.text_document.version > change_notification_1.text_document.version
2218 );
2219
2220 // Handle out-of-order diagnostics
2221 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2222 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2223 version: Some(change_notification_2.text_document.version),
2224 diagnostics: vec![
2225 lsp::Diagnostic {
2226 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2227 severity: Some(DiagnosticSeverity::ERROR),
2228 message: "undefined variable 'BB'".to_string(),
2229 source: Some("disk".to_string()),
2230 ..Default::default()
2231 },
2232 lsp::Diagnostic {
2233 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2234 severity: Some(DiagnosticSeverity::WARNING),
2235 message: "undefined variable 'A'".to_string(),
2236 source: Some("disk".to_string()),
2237 ..Default::default()
2238 },
2239 ],
2240 });
2241
2242 cx.executor().run_until_parked();
2243 buffer.update(cx, |buffer, _| {
2244 assert_eq!(
2245 buffer
2246 .snapshot()
2247 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2248 .collect::<Vec<_>>(),
2249 &[
2250 DiagnosticEntry {
2251 range: Point::new(2, 21)..Point::new(2, 22),
2252 diagnostic: Diagnostic {
2253 source: Some("disk".into()),
2254 severity: DiagnosticSeverity::WARNING,
2255 message: "undefined variable 'A'".to_string(),
2256 is_disk_based: true,
2257 group_id: 6,
2258 is_primary: true,
2259 ..Default::default()
2260 }
2261 },
2262 DiagnosticEntry {
2263 range: Point::new(3, 9)..Point::new(3, 14),
2264 diagnostic: Diagnostic {
2265 source: Some("disk".into()),
2266 severity: DiagnosticSeverity::ERROR,
2267 message: "undefined variable 'BB'".to_string(),
2268 is_disk_based: true,
2269 group_id: 5,
2270 is_primary: true,
2271 ..Default::default()
2272 },
2273 }
2274 ]
2275 );
2276 });
2277}
2278
2279#[gpui::test]
2280async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2281 init_test(cx);
2282
2283 let text = concat!(
2284 "let one = ;\n", //
2285 "let two = \n",
2286 "let three = 3;\n",
2287 );
2288
2289 let fs = FakeFs::new(cx.executor());
2290 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2291
2292 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2293 let buffer = project
2294 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2295 .await
2296 .unwrap();
2297
2298 project.update(cx, |project, cx| {
2299 project.lsp_store.update(cx, |lsp_store, cx| {
2300 lsp_store
2301 .update_diagnostic_entries(
2302 LanguageServerId(0),
2303 PathBuf::from("/dir/a.rs"),
2304 None,
2305 vec![
2306 DiagnosticEntry {
2307 range: Unclipped(PointUtf16::new(0, 10))
2308 ..Unclipped(PointUtf16::new(0, 10)),
2309 diagnostic: Diagnostic {
2310 severity: DiagnosticSeverity::ERROR,
2311 message: "syntax error 1".to_string(),
2312 ..Default::default()
2313 },
2314 },
2315 DiagnosticEntry {
2316 range: Unclipped(PointUtf16::new(1, 10))
2317 ..Unclipped(PointUtf16::new(1, 10)),
2318 diagnostic: Diagnostic {
2319 severity: DiagnosticSeverity::ERROR,
2320 message: "syntax error 2".to_string(),
2321 ..Default::default()
2322 },
2323 },
2324 ],
2325 cx,
2326 )
2327 .unwrap();
2328 })
2329 });
2330
2331 // An empty range is extended forward to include the following character.
2332 // At the end of a line, an empty range is extended backward to include
2333 // the preceding character.
2334 buffer.update(cx, |buffer, _| {
2335 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2336 assert_eq!(
2337 chunks
2338 .iter()
2339 .map(|(s, d)| (s.as_str(), *d))
2340 .collect::<Vec<_>>(),
2341 &[
2342 ("let one = ", None),
2343 (";", Some(DiagnosticSeverity::ERROR)),
2344 ("\nlet two =", None),
2345 (" ", Some(DiagnosticSeverity::ERROR)),
2346 ("\nlet three = 3;\n", None)
2347 ]
2348 );
2349 });
2350}
2351
2352#[gpui::test]
2353async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2354 init_test(cx);
2355
2356 let fs = FakeFs::new(cx.executor());
2357 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2358 .await;
2359
2360 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2361 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2362
2363 lsp_store.update(cx, |lsp_store, cx| {
2364 lsp_store
2365 .update_diagnostic_entries(
2366 LanguageServerId(0),
2367 Path::new("/dir/a.rs").to_owned(),
2368 None,
2369 vec![DiagnosticEntry {
2370 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2371 diagnostic: Diagnostic {
2372 severity: DiagnosticSeverity::ERROR,
2373 is_primary: true,
2374 message: "syntax error a1".to_string(),
2375 ..Default::default()
2376 },
2377 }],
2378 cx,
2379 )
2380 .unwrap();
2381 lsp_store
2382 .update_diagnostic_entries(
2383 LanguageServerId(1),
2384 Path::new("/dir/a.rs").to_owned(),
2385 None,
2386 vec![DiagnosticEntry {
2387 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2388 diagnostic: Diagnostic {
2389 severity: DiagnosticSeverity::ERROR,
2390 is_primary: true,
2391 message: "syntax error b1".to_string(),
2392 ..Default::default()
2393 },
2394 }],
2395 cx,
2396 )
2397 .unwrap();
2398
2399 assert_eq!(
2400 lsp_store.diagnostic_summary(false, cx),
2401 DiagnosticSummary {
2402 error_count: 2,
2403 warning_count: 0,
2404 }
2405 );
2406 });
2407}
2408
// Verifies that edits the language server computed against an *older* version
// of the buffer are transformed through the buffer edits made since that
// version, by passing the stale document version to `edits_from_lsp`.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw when the buffer was opened;
    // the LSP edits below will be expressed against this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The positions in these LSP edits refer to the *original* text (the
    // captured `lsp_document_version`), not the buffer's current state.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits must land them in the right places of the
    // *edited* buffer: the interleaved comments survive, the code is changed.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2563
// Verifies that `edits_from_lsp` collapses a sprawling whole-file diff into a
// minimal set of edits: the asserts below pin the result down to exactly two
// small edits, even though the server sent four overlapping/adjacent ones.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges into points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four server edits were reduced to two minimal, ordered edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2674
// Verifies that a replacement followed by an insertion at the same position
// (an ordering that violates the LSP spec) is still applied sensibly: the
// inserted import ends up *before* the replaced call expression.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replacement covering "Path".
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    // Zero-width insertion at the very start of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
2730
// Verifies that `edits_from_lsp` tolerates malformed server edits: unordered
// edits, an inverted range (end before start), and a range whose end row is
// past the end of the file. The result must still be the same minimal, sorted
// pair of edits as in the well-formed case.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start column 8, end column 4.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (row 99) is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges into points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the output is minimal and ordered.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2837
2838fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2839 buffer: &Buffer,
2840 range: Range<T>,
2841) -> Vec<(String, Option<DiagnosticSeverity>)> {
2842 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2843 for chunk in buffer.snapshot().chunks(range, true) {
2844 if chunks.last().map_or(false, |prev_chunk| {
2845 prev_chunk.1 == chunk.diagnostic_severity
2846 }) {
2847 chunks.last_mut().unwrap().0.push_str(chunk.text);
2848 } else {
2849 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2850 }
2851 }
2852 chunks
2853}
2854
// End-to-end go-to-definition test. The server's response points into a file
// outside the project's visible worktree; that file must be opened via a new
// *invisible* worktree, which is released once the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` exists on disk but is not in
    // any worktree yet.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Respond with a location in `a.rs`, which is outside the project.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The target's file was added as an invisible (`false`) worktree
        // alongside the original visible one.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for `a.rs`.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute root path together with its visibility.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2952
// Verifies that when a completion item carries a `text_edit`, it wins over
// both `insert_text` and `label` for the resulting new text and replace range.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completions request *before* installing the handler; the
    // handler is awaited below to serve this in-flight request.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        // Covers the trailing "fqn" of the buffer text.
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The text_edit's new_text and range were used, not insert_text/label.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3030
// Verifies that the completion list's `itemDefaults.editRange` supplies the
// replace range when an item has no `text_edit` of its own, and that the new
// text falls back from `insert_text` to `label` when both are absent.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Start the request first; the handler installed below serves it.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // List-wide default range covering the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text supplies the text; the default edit_range the range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With neither text_edit nor insert_text, the label is used.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3156
// Verifies completion behavior when the server provides neither a per-item
// `text_edit` nor a list-level default edit range: per the asserts, the
// replace range is inferred locally (covering "fqn" and "cmp" respectively —
// presumably the token around the cursor; the implementation lives elsewhere).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Start the request first; the handler installed below serves it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // Replace range covers the trailing "fqn" before the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // Replace range covers "cmp" inside the string literal, not the quote.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3252
// Verifies that carriage returns in a completion's insert_text — both bare
// "\r" and "\r\n" — are normalized to "\n" in the resulting new_text.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Start the request first; the handler installed below serves it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mixed line endings: a bare CR and a CRLF.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    assert_eq!(completions.len(), 1);
    // Both CR variants were normalized to plain LF.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3315
// Verifies the code-action flow in which the server returns a command rather
// than edits: resolve -> workspace/executeCommand -> server-initiated
// workspace/applyEdit, with the resulting edits captured in the transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The fake server advertises both code-action resolution and the
    // "_the/command" execute-command capability used further below.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying command data).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server -> client request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3457
3458#[gpui::test(iterations = 10)]
3459async fn test_save_file(cx: &mut gpui::TestAppContext) {
3460 init_test(cx);
3461
3462 let fs = FakeFs::new(cx.executor());
3463 fs.insert_tree(
3464 path!("/dir"),
3465 json!({
3466 "file1": "the old contents",
3467 }),
3468 )
3469 .await;
3470
3471 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3472 let buffer = project
3473 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3474 .await
3475 .unwrap();
3476 buffer.update(cx, |buffer, cx| {
3477 assert_eq!(buffer.text(), "the old contents");
3478 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3479 });
3480
3481 project
3482 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3483 .await
3484 .unwrap();
3485
3486 let new_text = fs
3487 .load(Path::new(path!("/dir/file1")))
3488 .await
3489 .unwrap()
3490 .replace("\r\n", "\n");
3491 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3492}
3493
3494#[gpui::test(iterations = 30)]
3495async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3496 init_test(cx);
3497
3498 let fs = FakeFs::new(cx.executor().clone());
3499 fs.insert_tree(
3500 path!("/dir"),
3501 json!({
3502 "file1": "the original contents",
3503 }),
3504 )
3505 .await;
3506
3507 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3508 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3509 let buffer = project
3510 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3511 .await
3512 .unwrap();
3513
3514 // Simulate buffer diffs being slow, so that they don't complete before
3515 // the next file change occurs.
3516 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3517
3518 // Change the buffer's file on disk, and then wait for the file change
3519 // to be detected by the worktree, so that the buffer starts reloading.
3520 fs.save(
3521 path!("/dir/file1").as_ref(),
3522 &"the first contents".into(),
3523 Default::default(),
3524 )
3525 .await
3526 .unwrap();
3527 worktree.next_event(cx).await;
3528
3529 // Change the buffer's file again. Depending on the random seed, the
3530 // previous file change may still be in progress.
3531 fs.save(
3532 path!("/dir/file1").as_ref(),
3533 &"the second contents".into(),
3534 Default::default(),
3535 )
3536 .await
3537 .unwrap();
3538 worktree.next_event(cx).await;
3539
3540 cx.executor().run_until_parked();
3541 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3542 buffer.read_with(cx, |buffer, _| {
3543 assert_eq!(buffer.text(), on_disk_text);
3544 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3545 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3546 });
3547}
3548
3549#[gpui::test(iterations = 30)]
3550async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3551 init_test(cx);
3552
3553 let fs = FakeFs::new(cx.executor().clone());
3554 fs.insert_tree(
3555 path!("/dir"),
3556 json!({
3557 "file1": "the original contents",
3558 }),
3559 )
3560 .await;
3561
3562 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3563 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3564 let buffer = project
3565 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3566 .await
3567 .unwrap();
3568
3569 // Simulate buffer diffs being slow, so that they don't complete before
3570 // the next file change occurs.
3571 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3572
3573 // Change the buffer's file on disk, and then wait for the file change
3574 // to be detected by the worktree, so that the buffer starts reloading.
3575 fs.save(
3576 path!("/dir/file1").as_ref(),
3577 &"the first contents".into(),
3578 Default::default(),
3579 )
3580 .await
3581 .unwrap();
3582 worktree.next_event(cx).await;
3583
3584 cx.executor()
3585 .spawn(cx.executor().simulate_random_delay())
3586 .await;
3587
3588 // Perform a noop edit, causing the buffer's version to increase.
3589 buffer.update(cx, |buffer, cx| {
3590 buffer.edit([(0..0, " ")], None, cx);
3591 buffer.undo(cx);
3592 });
3593
3594 cx.executor().run_until_parked();
3595 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3596 buffer.read_with(cx, |buffer, _| {
3597 let buffer_text = buffer.text();
3598 if buffer_text == on_disk_text {
3599 assert!(
3600 !buffer.is_dirty() && !buffer.has_conflict(),
3601 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3602 );
3603 }
3604 // If the file change occurred while the buffer was processing the first
3605 // change, the buffer will be in a conflicting state.
3606 else {
3607 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3608 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3609 }
3610 });
3611}
3612
3613#[gpui::test]
3614async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3615 init_test(cx);
3616
3617 let fs = FakeFs::new(cx.executor());
3618 fs.insert_tree(
3619 path!("/dir"),
3620 json!({
3621 "file1": "the old contents",
3622 }),
3623 )
3624 .await;
3625
3626 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3627 let buffer = project
3628 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3629 .await
3630 .unwrap();
3631 buffer.update(cx, |buffer, cx| {
3632 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3633 });
3634
3635 project
3636 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3637 .await
3638 .unwrap();
3639
3640 let new_text = fs
3641 .load(Path::new(path!("/dir/file1")))
3642 .await
3643 .unwrap()
3644 .replace("\r\n", "\n");
3645 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3646}
3647
3648#[gpui::test]
3649async fn test_save_as(cx: &mut gpui::TestAppContext) {
3650 init_test(cx);
3651
3652 let fs = FakeFs::new(cx.executor());
3653 fs.insert_tree("/dir", json!({})).await;
3654
3655 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3656
3657 let languages = project.update(cx, |project, _| project.languages().clone());
3658 languages.add(rust_lang());
3659
3660 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3661 buffer.update(cx, |buffer, cx| {
3662 buffer.edit([(0..0, "abc")], None, cx);
3663 assert!(buffer.is_dirty());
3664 assert!(!buffer.has_conflict());
3665 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3666 });
3667 project
3668 .update(cx, |project, cx| {
3669 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3670 let path = ProjectPath {
3671 worktree_id,
3672 path: Arc::from(Path::new("file1.rs")),
3673 };
3674 project.save_buffer_as(buffer.clone(), path, cx)
3675 })
3676 .await
3677 .unwrap();
3678 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3679
3680 cx.executor().run_until_parked();
3681 buffer.update(cx, |buffer, cx| {
3682 assert_eq!(
3683 buffer.file().unwrap().full_path(cx),
3684 Path::new("dir/file1.rs")
3685 );
3686 assert!(!buffer.is_dirty());
3687 assert!(!buffer.has_conflict());
3688 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3689 });
3690
3691 let opened_buffer = project
3692 .update(cx, |project, cx| {
3693 project.open_local_buffer("/dir/file1.rs", cx)
3694 })
3695 .await
3696 .unwrap();
3697 assert_eq!(opened_buffer, buffer);
3698}
3699
// End-to-end check that real-filesystem renames/deletions are reflected in
// the local worktree (with stable entry ids and buffer file handles), and
// that replaying the observed updates brings a remote replica into the same
// state.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: the worktree's stable entry id assigned to a path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, so they can be replayed
    // into the remote worktree at the end of the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including renames of a parent directory.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        // ...and their disk state reflects whether the file still exists.
        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3865
3866#[gpui::test(iterations = 10)]
3867async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3868 init_test(cx);
3869
3870 let fs = FakeFs::new(cx.executor());
3871 fs.insert_tree(
3872 path!("/dir"),
3873 json!({
3874 "a": {
3875 "file1": "",
3876 }
3877 }),
3878 )
3879 .await;
3880
3881 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3882 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3883 let tree_id = tree.update(cx, |tree, _| tree.id());
3884
3885 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3886 project.update(cx, |project, cx| {
3887 let tree = project.worktrees(cx).next().unwrap();
3888 tree.read(cx)
3889 .entry_for_path(path)
3890 .unwrap_or_else(|| panic!("no entry for path {}", path))
3891 .id
3892 })
3893 };
3894
3895 let dir_id = id_for_path("a", cx);
3896 let file_id = id_for_path("a/file1", cx);
3897 let buffer = project
3898 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3899 .await
3900 .unwrap();
3901 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3902
3903 project
3904 .update(cx, |project, cx| {
3905 project.rename_entry(dir_id, Path::new("b"), cx)
3906 })
3907 .unwrap()
3908 .await
3909 .to_included()
3910 .unwrap();
3911 cx.executor().run_until_parked();
3912
3913 assert_eq!(id_for_path("b", cx), dir_id);
3914 assert_eq!(id_for_path("b/file1", cx), file_id);
3915 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3916}
3917
3918#[gpui::test]
3919async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3920 init_test(cx);
3921
3922 let fs = FakeFs::new(cx.executor());
3923 fs.insert_tree(
3924 "/dir",
3925 json!({
3926 "a.txt": "a-contents",
3927 "b.txt": "b-contents",
3928 }),
3929 )
3930 .await;
3931
3932 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3933
3934 // Spawn multiple tasks to open paths, repeating some paths.
3935 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3936 (
3937 p.open_local_buffer("/dir/a.txt", cx),
3938 p.open_local_buffer("/dir/b.txt", cx),
3939 p.open_local_buffer("/dir/a.txt", cx),
3940 )
3941 });
3942
3943 let buffer_a_1 = buffer_a_1.await.unwrap();
3944 let buffer_a_2 = buffer_a_2.await.unwrap();
3945 let buffer_b = buffer_b.await.unwrap();
3946 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3947 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3948
3949 // There is only one buffer per path.
3950 let buffer_a_id = buffer_a_1.entity_id();
3951 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3952
3953 // Open the same path again while it is still open.
3954 drop(buffer_a_1);
3955 let buffer_a_3 = project
3956 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3957 .await
3958 .unwrap();
3959
3960 // There's still only one buffer per path.
3961 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3962}
3963
// Exercises the buffer dirty/conflict lifecycle and the exact sequence of
// events emitted for edits, saves, undo-equivalent edits, and on-disk
// deletions of clean vs. already-dirty buffers.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collect all buffer events except `Operation`, which is emitted for
    // every edit and would drown out the events under test.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Mark the buffer saved at its current version, as a real save would.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first of the two edits flips the dirty bit, so
        // DirtyChanged appears once between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
4145
// When the underlying file changes on disk: a clean buffer reloads via a
// diff (preserving anchors across the patch), while a dirty buffer keeps
// its contents and is marked as conflicted instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The `ˇ` markers yield offsets at which anchors are created below.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // The markers in the new contents are where the anchors are expected to
    // land after the reload diff is applied.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4228
4229#[gpui::test]
4230async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4231 init_test(cx);
4232
4233 let fs = FakeFs::new(cx.executor());
4234 fs.insert_tree(
4235 path!("/dir"),
4236 json!({
4237 "file1": "a\nb\nc\n",
4238 "file2": "one\r\ntwo\r\nthree\r\n",
4239 }),
4240 )
4241 .await;
4242
4243 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4244 let buffer1 = project
4245 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4246 .await
4247 .unwrap();
4248 let buffer2 = project
4249 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4250 .await
4251 .unwrap();
4252
4253 buffer1.update(cx, |buffer, _| {
4254 assert_eq!(buffer.text(), "a\nb\nc\n");
4255 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4256 });
4257 buffer2.update(cx, |buffer, _| {
4258 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4259 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4260 });
4261
4262 // Change a file's line endings on disk from unix to windows. The buffer's
4263 // state updates correctly.
4264 fs.save(
4265 path!("/dir/file1").as_ref(),
4266 &"aaa\nb\nc\n".into(),
4267 LineEnding::Windows,
4268 )
4269 .await
4270 .unwrap();
4271 cx.executor().run_until_parked();
4272 buffer1.update(cx, |buffer, _| {
4273 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4274 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4275 });
4276
4277 // Save a file with windows line endings. The file is written correctly.
4278 buffer2.update(cx, |buffer, cx| {
4279 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4280 });
4281 project
4282 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4283 .await
4284 .unwrap();
4285 assert_eq!(
4286 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4287 "one\r\ntwo\r\nthree\r\nfour\r\n",
4288 );
4289}
4290
4291#[gpui::test]
4292async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4293 init_test(cx);
4294
4295 let fs = FakeFs::new(cx.executor());
4296 fs.insert_tree(
4297 path!("/dir"),
4298 json!({
4299 "a.rs": "
4300 fn foo(mut v: Vec<usize>) {
4301 for x in &v {
4302 v.push(1);
4303 }
4304 }
4305 "
4306 .unindent(),
4307 }),
4308 )
4309 .await;
4310
4311 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4312 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
4313 let buffer = project
4314 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
4315 .await
4316 .unwrap();
4317
4318 let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
4319 let message = lsp::PublishDiagnosticsParams {
4320 uri: buffer_uri.clone(),
4321 diagnostics: vec![
4322 lsp::Diagnostic {
4323 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4324 severity: Some(DiagnosticSeverity::WARNING),
4325 message: "error 1".to_string(),
4326 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4327 location: lsp::Location {
4328 uri: buffer_uri.clone(),
4329 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4330 },
4331 message: "error 1 hint 1".to_string(),
4332 }]),
4333 ..Default::default()
4334 },
4335 lsp::Diagnostic {
4336 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4337 severity: Some(DiagnosticSeverity::HINT),
4338 message: "error 1 hint 1".to_string(),
4339 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4340 location: lsp::Location {
4341 uri: buffer_uri.clone(),
4342 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4343 },
4344 message: "original diagnostic".to_string(),
4345 }]),
4346 ..Default::default()
4347 },
4348 lsp::Diagnostic {
4349 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4350 severity: Some(DiagnosticSeverity::ERROR),
4351 message: "error 2".to_string(),
4352 related_information: Some(vec![
4353 lsp::DiagnosticRelatedInformation {
4354 location: lsp::Location {
4355 uri: buffer_uri.clone(),
4356 range: lsp::Range::new(
4357 lsp::Position::new(1, 13),
4358 lsp::Position::new(1, 15),
4359 ),
4360 },
4361 message: "error 2 hint 1".to_string(),
4362 },
4363 lsp::DiagnosticRelatedInformation {
4364 location: lsp::Location {
4365 uri: buffer_uri.clone(),
4366 range: lsp::Range::new(
4367 lsp::Position::new(1, 13),
4368 lsp::Position::new(1, 15),
4369 ),
4370 },
4371 message: "error 2 hint 2".to_string(),
4372 },
4373 ]),
4374 ..Default::default()
4375 },
4376 lsp::Diagnostic {
4377 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4378 severity: Some(DiagnosticSeverity::HINT),
4379 message: "error 2 hint 1".to_string(),
4380 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4381 location: lsp::Location {
4382 uri: buffer_uri.clone(),
4383 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4384 },
4385 message: "original diagnostic".to_string(),
4386 }]),
4387 ..Default::default()
4388 },
4389 lsp::Diagnostic {
4390 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4391 severity: Some(DiagnosticSeverity::HINT),
4392 message: "error 2 hint 2".to_string(),
4393 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4394 location: lsp::Location {
4395 uri: buffer_uri,
4396 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4397 },
4398 message: "original diagnostic".to_string(),
4399 }]),
4400 ..Default::default()
4401 },
4402 ],
4403 version: None,
4404 };
4405
4406 lsp_store
4407 .update(cx, |lsp_store, cx| {
4408 lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
4409 })
4410 .unwrap();
4411 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
4412
4413 assert_eq!(
4414 buffer
4415 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
4416 .collect::<Vec<_>>(),
4417 &[
4418 DiagnosticEntry {
4419 range: Point::new(1, 8)..Point::new(1, 9),
4420 diagnostic: Diagnostic {
4421 severity: DiagnosticSeverity::WARNING,
4422 message: "error 1".to_string(),
4423 group_id: 1,
4424 is_primary: true,
4425 ..Default::default()
4426 }
4427 },
4428 DiagnosticEntry {
4429 range: Point::new(1, 8)..Point::new(1, 9),
4430 diagnostic: Diagnostic {
4431 severity: DiagnosticSeverity::HINT,
4432 message: "error 1 hint 1".to_string(),
4433 group_id: 1,
4434 is_primary: false,
4435 ..Default::default()
4436 }
4437 },
4438 DiagnosticEntry {
4439 range: Point::new(1, 13)..Point::new(1, 15),
4440 diagnostic: Diagnostic {
4441 severity: DiagnosticSeverity::HINT,
4442 message: "error 2 hint 1".to_string(),
4443 group_id: 0,
4444 is_primary: false,
4445 ..Default::default()
4446 }
4447 },
4448 DiagnosticEntry {
4449 range: Point::new(1, 13)..Point::new(1, 15),
4450 diagnostic: Diagnostic {
4451 severity: DiagnosticSeverity::HINT,
4452 message: "error 2 hint 2".to_string(),
4453 group_id: 0,
4454 is_primary: false,
4455 ..Default::default()
4456 }
4457 },
4458 DiagnosticEntry {
4459 range: Point::new(2, 8)..Point::new(2, 17),
4460 diagnostic: Diagnostic {
4461 severity: DiagnosticSeverity::ERROR,
4462 message: "error 2".to_string(),
4463 group_id: 0,
4464 is_primary: true,
4465 ..Default::default()
4466 }
4467 }
4468 ]
4469 );
4470
4471 assert_eq!(
4472 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4473 &[
4474 DiagnosticEntry {
4475 range: Point::new(1, 13)..Point::new(1, 15),
4476 diagnostic: Diagnostic {
4477 severity: DiagnosticSeverity::HINT,
4478 message: "error 2 hint 1".to_string(),
4479 group_id: 0,
4480 is_primary: false,
4481 ..Default::default()
4482 }
4483 },
4484 DiagnosticEntry {
4485 range: Point::new(1, 13)..Point::new(1, 15),
4486 diagnostic: Diagnostic {
4487 severity: DiagnosticSeverity::HINT,
4488 message: "error 2 hint 2".to_string(),
4489 group_id: 0,
4490 is_primary: false,
4491 ..Default::default()
4492 }
4493 },
4494 DiagnosticEntry {
4495 range: Point::new(2, 8)..Point::new(2, 17),
4496 diagnostic: Diagnostic {
4497 severity: DiagnosticSeverity::ERROR,
4498 message: "error 2".to_string(),
4499 group_id: 0,
4500 is_primary: true,
4501 ..Default::default()
4502 }
4503 }
4504 ]
4505 );
4506
4507 assert_eq!(
4508 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4509 &[
4510 DiagnosticEntry {
4511 range: Point::new(1, 8)..Point::new(1, 9),
4512 diagnostic: Diagnostic {
4513 severity: DiagnosticSeverity::WARNING,
4514 message: "error 1".to_string(),
4515 group_id: 1,
4516 is_primary: true,
4517 ..Default::default()
4518 }
4519 },
4520 DiagnosticEntry {
4521 range: Point::new(1, 8)..Point::new(1, 9),
4522 diagnostic: Diagnostic {
4523 severity: DiagnosticSeverity::HINT,
4524 message: "error 1 hint 1".to_string(),
4525 group_id: 1,
4526 is_primary: false,
4527 ..Default::default()
4528 }
4529 },
4530 ]
4531 );
4532}
4533
/// Renaming a worktree entry must drive the LSP file-operation protocol:
/// the project sends `workspace/willRenameFiles` before the rename (and
/// records the edit the server returns), then `workspace/didRenameFiles`
/// afterwards, because the fake server registered matching file-operation
/// filters for `*.rs` files and for folders.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters the server registers: every `*.rs` file plus all
    // folders. The rename below ("one.rs" -> "three.rs") matches the first
    // filter, so the server must be told about it.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer is what starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename first; it stays pending until the
    // `WillRenameFiles` handler installed below answers.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // Workspace edit the fake server answers `willRenameFiles` with.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the handler actually produced, for the final assertion.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must carry exactly one rename with the
                    // old and new file URIs.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive the
    // `didRenameFiles` notification with the same URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    // The will-rename handler must have run and returned the expected edit.
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4662
/// End-to-end test of symbol rename via LSP: `prepare_rename` resolves the
/// renameable range from the server, then `perform_rename` applies the
/// server's workspace edit across multiple buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Advertise rename support with `prepare_provider`, so the prepare step
    // is routed to the server.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Ask to rename at offset 7 (inside "ONE"); the future stays pending
    // until the handler installed below replies.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            // Report columns 6..9 as the renameable range.
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The LSP range must round-trip to buffer offsets 6..9.
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Now perform the rename; the server responds with edits to both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            // Edit the definition in one.rs and both references in two.rs.
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // `transaction` maps each edited buffer to its transaction; both one.rs
    // and two.rs must have been rewritten.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4802
4803#[gpui::test]
4804async fn test_search(cx: &mut gpui::TestAppContext) {
4805 init_test(cx);
4806
4807 let fs = FakeFs::new(cx.executor());
4808 fs.insert_tree(
4809 path!("/dir"),
4810 json!({
4811 "one.rs": "const ONE: usize = 1;",
4812 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4813 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4814 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4815 }),
4816 )
4817 .await;
4818 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4819 assert_eq!(
4820 search(
4821 &project,
4822 SearchQuery::text(
4823 "TWO",
4824 false,
4825 true,
4826 false,
4827 Default::default(),
4828 Default::default(),
4829 false,
4830 None
4831 )
4832 .unwrap(),
4833 cx
4834 )
4835 .await
4836 .unwrap(),
4837 HashMap::from_iter([
4838 (separator!("dir/two.rs").to_string(), vec![6..9]),
4839 (separator!("dir/three.rs").to_string(), vec![37..40])
4840 ])
4841 );
4842
4843 let buffer_4 = project
4844 .update(cx, |project, cx| {
4845 project.open_local_buffer(path!("/dir/four.rs"), cx)
4846 })
4847 .await
4848 .unwrap();
4849 buffer_4.update(cx, |buffer, cx| {
4850 let text = "two::TWO";
4851 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4852 });
4853
4854 assert_eq!(
4855 search(
4856 &project,
4857 SearchQuery::text(
4858 "TWO",
4859 false,
4860 true,
4861 false,
4862 Default::default(),
4863 Default::default(),
4864 false,
4865 None,
4866 )
4867 .unwrap(),
4868 cx
4869 )
4870 .await
4871 .unwrap(),
4872 HashMap::from_iter([
4873 (separator!("dir/two.rs").to_string(), vec![6..9]),
4874 (separator!("dir/three.rs").to_string(), vec![37..40]),
4875 (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
4876 ])
4877 );
4878}
4879
4880#[gpui::test]
4881async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4882 init_test(cx);
4883
4884 let search_query = "file";
4885
4886 let fs = FakeFs::new(cx.executor());
4887 fs.insert_tree(
4888 path!("/dir"),
4889 json!({
4890 "one.rs": r#"// Rust file one"#,
4891 "one.ts": r#"// TypeScript file one"#,
4892 "two.rs": r#"// Rust file two"#,
4893 "two.ts": r#"// TypeScript file two"#,
4894 }),
4895 )
4896 .await;
4897 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4898
4899 assert!(
4900 search(
4901 &project,
4902 SearchQuery::text(
4903 search_query,
4904 false,
4905 true,
4906 false,
4907 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4908 Default::default(),
4909 false,
4910 None
4911 )
4912 .unwrap(),
4913 cx
4914 )
4915 .await
4916 .unwrap()
4917 .is_empty(),
4918 "If no inclusions match, no files should be returned"
4919 );
4920
4921 assert_eq!(
4922 search(
4923 &project,
4924 SearchQuery::text(
4925 search_query,
4926 false,
4927 true,
4928 false,
4929 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4930 Default::default(),
4931 false,
4932 None
4933 )
4934 .unwrap(),
4935 cx
4936 )
4937 .await
4938 .unwrap(),
4939 HashMap::from_iter([
4940 (separator!("dir/one.rs").to_string(), vec![8..12]),
4941 (separator!("dir/two.rs").to_string(), vec![8..12]),
4942 ]),
4943 "Rust only search should give only Rust files"
4944 );
4945
4946 assert_eq!(
4947 search(
4948 &project,
4949 SearchQuery::text(
4950 search_query,
4951 false,
4952 true,
4953 false,
4954 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4955 Default::default(),
4956 false,
4957 None,
4958 )
4959 .unwrap(),
4960 cx
4961 )
4962 .await
4963 .unwrap(),
4964 HashMap::from_iter([
4965 (separator!("dir/one.ts").to_string(), vec![14..18]),
4966 (separator!("dir/two.ts").to_string(), vec![14..18]),
4967 ]),
4968 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4969 );
4970
4971 assert_eq!(
4972 search(
4973 &project,
4974 SearchQuery::text(
4975 search_query,
4976 false,
4977 true,
4978 false,
4979 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
4980 .unwrap(),
4981 Default::default(),
4982 false,
4983 None,
4984 )
4985 .unwrap(),
4986 cx
4987 )
4988 .await
4989 .unwrap(),
4990 HashMap::from_iter([
4991 (separator!("dir/two.ts").to_string(), vec![14..18]),
4992 (separator!("dir/one.rs").to_string(), vec![8..12]),
4993 (separator!("dir/one.ts").to_string(), vec![14..18]),
4994 (separator!("dir/two.rs").to_string(), vec![8..12]),
4995 ]),
4996 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4997 );
4998}
4999
5000#[gpui::test]
5001async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5002 init_test(cx);
5003
5004 let search_query = "file";
5005
5006 let fs = FakeFs::new(cx.executor());
5007 fs.insert_tree(
5008 path!("/dir"),
5009 json!({
5010 "one.rs": r#"// Rust file one"#,
5011 "one.ts": r#"// TypeScript file one"#,
5012 "two.rs": r#"// Rust file two"#,
5013 "two.ts": r#"// TypeScript file two"#,
5014 }),
5015 )
5016 .await;
5017 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5018
5019 assert_eq!(
5020 search(
5021 &project,
5022 SearchQuery::text(
5023 search_query,
5024 false,
5025 true,
5026 false,
5027 Default::default(),
5028 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5029 false,
5030 None,
5031 )
5032 .unwrap(),
5033 cx
5034 )
5035 .await
5036 .unwrap(),
5037 HashMap::from_iter([
5038 (separator!("dir/one.rs").to_string(), vec![8..12]),
5039 (separator!("dir/one.ts").to_string(), vec![14..18]),
5040 (separator!("dir/two.rs").to_string(), vec![8..12]),
5041 (separator!("dir/two.ts").to_string(), vec![14..18]),
5042 ]),
5043 "If no exclusions match, all files should be returned"
5044 );
5045
5046 assert_eq!(
5047 search(
5048 &project,
5049 SearchQuery::text(
5050 search_query,
5051 false,
5052 true,
5053 false,
5054 Default::default(),
5055 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5056 false,
5057 None,
5058 )
5059 .unwrap(),
5060 cx
5061 )
5062 .await
5063 .unwrap(),
5064 HashMap::from_iter([
5065 (separator!("dir/one.ts").to_string(), vec![14..18]),
5066 (separator!("dir/two.ts").to_string(), vec![14..18]),
5067 ]),
5068 "Rust exclusion search should give only TypeScript files"
5069 );
5070
5071 assert_eq!(
5072 search(
5073 &project,
5074 SearchQuery::text(
5075 search_query,
5076 false,
5077 true,
5078 false,
5079 Default::default(),
5080 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5081 false,
5082 None,
5083 )
5084 .unwrap(),
5085 cx
5086 )
5087 .await
5088 .unwrap(),
5089 HashMap::from_iter([
5090 (separator!("dir/one.rs").to_string(), vec![8..12]),
5091 (separator!("dir/two.rs").to_string(), vec![8..12]),
5092 ]),
5093 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5094 );
5095
5096 assert!(
5097 search(
5098 &project,
5099 SearchQuery::text(
5100 search_query,
5101 false,
5102 true,
5103 false,
5104 Default::default(),
5105 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5106 .unwrap(),
5107 false,
5108 None,
5109 )
5110 .unwrap(),
5111 cx
5112 )
5113 .await
5114 .unwrap()
5115 .is_empty(),
5116 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5117 );
5118}
5119
5120#[gpui::test]
5121async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5122 init_test(cx);
5123
5124 let search_query = "file";
5125
5126 let fs = FakeFs::new(cx.executor());
5127 fs.insert_tree(
5128 path!("/dir"),
5129 json!({
5130 "one.rs": r#"// Rust file one"#,
5131 "one.ts": r#"// TypeScript file one"#,
5132 "two.rs": r#"// Rust file two"#,
5133 "two.ts": r#"// TypeScript file two"#,
5134 }),
5135 )
5136 .await;
5137 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5138
5139 assert!(
5140 search(
5141 &project,
5142 SearchQuery::text(
5143 search_query,
5144 false,
5145 true,
5146 false,
5147 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5148 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5149 false,
5150 None,
5151 )
5152 .unwrap(),
5153 cx
5154 )
5155 .await
5156 .unwrap()
5157 .is_empty(),
5158 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5159 );
5160
5161 assert!(
5162 search(
5163 &project,
5164 SearchQuery::text(
5165 search_query,
5166 false,
5167 true,
5168 false,
5169 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5170 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5171 false,
5172 None,
5173 )
5174 .unwrap(),
5175 cx
5176 )
5177 .await
5178 .unwrap()
5179 .is_empty(),
5180 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5181 );
5182
5183 assert!(
5184 search(
5185 &project,
5186 SearchQuery::text(
5187 search_query,
5188 false,
5189 true,
5190 false,
5191 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5192 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5193 false,
5194 None,
5195 )
5196 .unwrap(),
5197 cx
5198 )
5199 .await
5200 .unwrap()
5201 .is_empty(),
5202 "Non-matching inclusions and exclusions should not change that."
5203 );
5204
5205 assert_eq!(
5206 search(
5207 &project,
5208 SearchQuery::text(
5209 search_query,
5210 false,
5211 true,
5212 false,
5213 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5214 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5215 false,
5216 None,
5217 )
5218 .unwrap(),
5219 cx
5220 )
5221 .await
5222 .unwrap(),
5223 HashMap::from_iter([
5224 (separator!("dir/one.ts").to_string(), vec![14..18]),
5225 (separator!("dir/two.ts").to_string(), vec![14..18]),
5226 ]),
5227 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5228 );
5229}
5230
5231#[gpui::test]
5232async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
5233 init_test(cx);
5234
5235 let fs = FakeFs::new(cx.executor());
5236 fs.insert_tree(
5237 path!("/worktree-a"),
5238 json!({
5239 "haystack.rs": r#"// NEEDLE"#,
5240 "haystack.ts": r#"// NEEDLE"#,
5241 }),
5242 )
5243 .await;
5244 fs.insert_tree(
5245 path!("/worktree-b"),
5246 json!({
5247 "haystack.rs": r#"// NEEDLE"#,
5248 "haystack.ts": r#"// NEEDLE"#,
5249 }),
5250 )
5251 .await;
5252
5253 let project = Project::test(
5254 fs.clone(),
5255 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
5256 cx,
5257 )
5258 .await;
5259
5260 assert_eq!(
5261 search(
5262 &project,
5263 SearchQuery::text(
5264 "NEEDLE",
5265 false,
5266 true,
5267 false,
5268 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
5269 Default::default(),
5270 true,
5271 None,
5272 )
5273 .unwrap(),
5274 cx
5275 )
5276 .await
5277 .unwrap(),
5278 HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
5279 "should only return results from included worktree"
5280 );
5281 assert_eq!(
5282 search(
5283 &project,
5284 SearchQuery::text(
5285 "NEEDLE",
5286 false,
5287 true,
5288 false,
5289 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
5290 Default::default(),
5291 true,
5292 None,
5293 )
5294 .unwrap(),
5295 cx
5296 )
5297 .await
5298 .unwrap(),
5299 HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
5300 "should only return results from included worktree"
5301 );
5302
5303 assert_eq!(
5304 search(
5305 &project,
5306 SearchQuery::text(
5307 "NEEDLE",
5308 false,
5309 true,
5310 false,
5311 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5312 Default::default(),
5313 false,
5314 None,
5315 )
5316 .unwrap(),
5317 cx
5318 )
5319 .await
5320 .unwrap(),
5321 HashMap::from_iter([
5322 (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
5323 (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
5324 ]),
5325 "should return results from both worktrees"
5326 );
5327}
5328
/// Verifies how project search interacts with `.gitignore`: ignored
/// directories are skipped by default, searched when the search is
/// configured to include ignored files, and still subject to
/// inclusion/exclusion globs.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    // `target` and `node_modules` are gitignored; only the root
    // `package.json` is tracked content containing the query.
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search (fourth flag false — presumably "include ignored";
    // TODO confirm against SearchQuery::text's signature).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is created for each query — presumably so
    // worktree state from the previous search doesn't leak into the next one;
    // confirm whether reusing the project would also work.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Same query with the ignored-files flag set: ignored trees are searched.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion/exclusion globs still apply on top of the ignored-files flag.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5451
/// Searching for multi-byte (Cyrillic) text. Match ranges are byte offsets:
/// each Cyrillic letter is 2 bytes in UTF-8, so the 6-letter word "привет"
/// spans 12 bytes. Also checks which `SearchQuery` variant gets built:
/// a case-sensitive plain-text query stays `Text`, while a case-insensitive
/// one is constructed as `Regex` (asserted via `assert_matches` below).
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Case-sensitive: only the lowercase occurrences match.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            // one.rs: the lowercase word after "// ПРИВЕТ? " (17 bytes in).
            (separator!("dir/one.rs").to_string(), vec![17..29]),
            (separator!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive: uppercase "ПРИВЕТ" matches too.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (separator!("dir/two.rs").to_string(), vec![3..15]),
            (separator!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Query containing a regex metacharacter ('.'); only the literal
    // "ПРИВЕТ." occurrence in two.rs should match.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
5535
5536#[gpui::test]
5537async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5538 init_test(cx);
5539
5540 let fs = FakeFs::new(cx.executor().clone());
5541 fs.insert_tree(
5542 "/one/two",
5543 json!({
5544 "three": {
5545 "a.txt": "",
5546 "four": {}
5547 },
5548 "c.rs": ""
5549 }),
5550 )
5551 .await;
5552
5553 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5554 project
5555 .update(cx, |project, cx| {
5556 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5557 project.create_entry((id, "b.."), true, cx)
5558 })
5559 .await
5560 .unwrap()
5561 .to_included()
5562 .unwrap();
5563
5564 // Can't create paths outside the project
5565 let result = project
5566 .update(cx, |project, cx| {
5567 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5568 project.create_entry((id, "../../boop"), true, cx)
5569 })
5570 .await;
5571 assert!(result.is_err());
5572
5573 // Can't create paths with '..'
5574 let result = project
5575 .update(cx, |project, cx| {
5576 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5577 project.create_entry((id, "four/../beep"), true, cx)
5578 })
5579 .await;
5580 assert!(result.is_err());
5581
5582 assert_eq!(
5583 fs.paths(true),
5584 vec![
5585 PathBuf::from(path!("/")),
5586 PathBuf::from(path!("/one")),
5587 PathBuf::from(path!("/one/two")),
5588 PathBuf::from(path!("/one/two/c.rs")),
5589 PathBuf::from(path!("/one/two/three")),
5590 PathBuf::from(path!("/one/two/three/a.txt")),
5591 PathBuf::from(path!("/one/two/three/b..")),
5592 PathBuf::from(path!("/one/two/three/four")),
5593 ]
5594 );
5595
5596 // And we cannot open buffers with '..'
5597 let result = project
5598 .update(cx, |project, cx| {
5599 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5600 project.open_buffer((id, "../c.rs"), cx)
5601 })
5602 .await;
5603 assert!(result.is_err())
5604}
5605
/// A buffer can be served by several language servers at once. A hover
/// request must fan out to every server that declared hover capability,
/// drop `None` responses, and never query a server that lacks the
/// capability.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four servers for "tsx": two will answer hovers, one answers `None`,
    // and one advertises no hover capability at all.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler per server, keyed by server name, so we can
    // later verify each capable server actually received the request.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two return real hover contents.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // This one is queried but has nothing to say.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // This one must never be asked at all.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Start the hover, then drive every installed handler: each server with
    // hover capability must receive exactly one request.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned `Some` contribute results; the
    // `None` response from ESLintServer is filtered out.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5759
5760#[gpui::test]
5761async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5762 init_test(cx);
5763
5764 let fs = FakeFs::new(cx.executor());
5765 fs.insert_tree(
5766 path!("/dir"),
5767 json!({
5768 "a.ts": "a",
5769 }),
5770 )
5771 .await;
5772
5773 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5774
5775 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5776 language_registry.add(typescript_lang());
5777 let mut fake_language_servers = language_registry.register_fake_lsp(
5778 "TypeScript",
5779 FakeLspAdapter {
5780 capabilities: lsp::ServerCapabilities {
5781 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5782 ..lsp::ServerCapabilities::default()
5783 },
5784 ..FakeLspAdapter::default()
5785 },
5786 );
5787
5788 let (buffer, _handle) = project
5789 .update(cx, |p, cx| {
5790 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5791 })
5792 .await
5793 .unwrap();
5794 cx.executor().run_until_parked();
5795
5796 let fake_server = fake_language_servers
5797 .next()
5798 .await
5799 .expect("failed to get the language server");
5800
5801 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5802 move |_, _| async move {
5803 Ok(Some(lsp::Hover {
5804 contents: lsp::HoverContents::Array(vec![
5805 lsp::MarkedString::String("".to_string()),
5806 lsp::MarkedString::String(" ".to_string()),
5807 lsp::MarkedString::String("\n\n\n".to_string()),
5808 ]),
5809 range: None,
5810 }))
5811 },
5812 );
5813
5814 let hover_task = project.update(cx, |project, cx| {
5815 project.hover(&buffer, Point::new(0, 0), cx)
5816 });
5817 let () = request_handled
5818 .next()
5819 .await
5820 .expect("All hover requests should have been triggered");
5821 assert_eq!(
5822 Vec::<String>::new(),
5823 hover_task
5824 .await
5825 .into_iter()
5826 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5827 .sorted()
5828 .collect::<Vec<_>>(),
5829 "Empty hover parts should be ignored"
5830 );
5831}
5832
5833#[gpui::test]
5834async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5835 init_test(cx);
5836
5837 let fs = FakeFs::new(cx.executor());
5838 fs.insert_tree(
5839 path!("/dir"),
5840 json!({
5841 "a.ts": "a",
5842 }),
5843 )
5844 .await;
5845
5846 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5847
5848 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5849 language_registry.add(typescript_lang());
5850 let mut fake_language_servers = language_registry.register_fake_lsp(
5851 "TypeScript",
5852 FakeLspAdapter {
5853 capabilities: lsp::ServerCapabilities {
5854 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5855 ..lsp::ServerCapabilities::default()
5856 },
5857 ..FakeLspAdapter::default()
5858 },
5859 );
5860
5861 let (buffer, _handle) = project
5862 .update(cx, |p, cx| {
5863 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5864 })
5865 .await
5866 .unwrap();
5867 cx.executor().run_until_parked();
5868
5869 let fake_server = fake_language_servers
5870 .next()
5871 .await
5872 .expect("failed to get the language server");
5873
5874 let mut request_handled = fake_server
5875 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
5876 Ok(Some(vec![
5877 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5878 title: "organize imports".to_string(),
5879 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5880 ..lsp::CodeAction::default()
5881 }),
5882 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5883 title: "fix code".to_string(),
5884 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5885 ..lsp::CodeAction::default()
5886 }),
5887 ]))
5888 });
5889
5890 let code_actions_task = project.update(cx, |project, cx| {
5891 project.code_actions(
5892 &buffer,
5893 0..buffer.read(cx).len(),
5894 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5895 cx,
5896 )
5897 });
5898
5899 let () = request_handled
5900 .next()
5901 .await
5902 .expect("The code action request should have been triggered");
5903
5904 let code_actions = code_actions_task.await.unwrap();
5905 assert_eq!(code_actions.len(), 1);
5906 assert_eq!(
5907 code_actions[0].lsp_action.action_kind(),
5908 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5909 );
5910}
5911
5912#[gpui::test]
5913async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5914 init_test(cx);
5915
5916 let fs = FakeFs::new(cx.executor());
5917 fs.insert_tree(
5918 path!("/dir"),
5919 json!({
5920 "a.tsx": "a",
5921 }),
5922 )
5923 .await;
5924
5925 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5926
5927 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5928 language_registry.add(tsx_lang());
5929 let language_server_names = [
5930 "TypeScriptServer",
5931 "TailwindServer",
5932 "ESLintServer",
5933 "NoActionsCapabilitiesServer",
5934 ];
5935
5936 let mut language_server_rxs = [
5937 language_registry.register_fake_lsp(
5938 "tsx",
5939 FakeLspAdapter {
5940 name: language_server_names[0],
5941 capabilities: lsp::ServerCapabilities {
5942 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5943 ..lsp::ServerCapabilities::default()
5944 },
5945 ..FakeLspAdapter::default()
5946 },
5947 ),
5948 language_registry.register_fake_lsp(
5949 "tsx",
5950 FakeLspAdapter {
5951 name: language_server_names[1],
5952 capabilities: lsp::ServerCapabilities {
5953 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5954 ..lsp::ServerCapabilities::default()
5955 },
5956 ..FakeLspAdapter::default()
5957 },
5958 ),
5959 language_registry.register_fake_lsp(
5960 "tsx",
5961 FakeLspAdapter {
5962 name: language_server_names[2],
5963 capabilities: lsp::ServerCapabilities {
5964 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5965 ..lsp::ServerCapabilities::default()
5966 },
5967 ..FakeLspAdapter::default()
5968 },
5969 ),
5970 language_registry.register_fake_lsp(
5971 "tsx",
5972 FakeLspAdapter {
5973 name: language_server_names[3],
5974 capabilities: lsp::ServerCapabilities {
5975 code_action_provider: None,
5976 ..lsp::ServerCapabilities::default()
5977 },
5978 ..FakeLspAdapter::default()
5979 },
5980 ),
5981 ];
5982
5983 let (buffer, _handle) = project
5984 .update(cx, |p, cx| {
5985 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5986 })
5987 .await
5988 .unwrap();
5989 cx.executor().run_until_parked();
5990
5991 let mut servers_with_actions_requests = HashMap::default();
5992 for i in 0..language_server_names.len() {
5993 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5994 panic!(
5995 "Failed to get language server #{i} with name {}",
5996 &language_server_names[i]
5997 )
5998 });
5999 let new_server_name = new_server.server.name();
6000
6001 assert!(
6002 !servers_with_actions_requests.contains_key(&new_server_name),
6003 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6004 );
6005 match new_server_name.0.as_ref() {
6006 "TailwindServer" | "TypeScriptServer" => {
6007 servers_with_actions_requests.insert(
6008 new_server_name.clone(),
6009 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6010 move |_, _| {
6011 let name = new_server_name.clone();
6012 async move {
6013 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6014 lsp::CodeAction {
6015 title: format!("{name} code action"),
6016 ..lsp::CodeAction::default()
6017 },
6018 )]))
6019 }
6020 },
6021 ),
6022 );
6023 }
6024 "ESLintServer" => {
6025 servers_with_actions_requests.insert(
6026 new_server_name,
6027 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6028 |_, _| async move { Ok(None) },
6029 ),
6030 );
6031 }
6032 "NoActionsCapabilitiesServer" => {
6033 let _never_handled = new_server
6034 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6035 panic!(
6036 "Should not call for code actions server with no corresponding capabilities"
6037 )
6038 });
6039 }
6040 unexpected => panic!("Unexpected server name: {unexpected}"),
6041 }
6042 }
6043
6044 let code_actions_task = project.update(cx, |project, cx| {
6045 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6046 });
6047
6048 // cx.run_until_parked();
6049 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6050 |mut code_actions_request| async move {
6051 code_actions_request
6052 .next()
6053 .await
6054 .expect("All code actions requests should have been triggered")
6055 },
6056 ))
6057 .await;
6058 assert_eq!(
6059 vec!["TailwindServer code action", "TypeScriptServer code action"],
6060 code_actions_task
6061 .await
6062 .unwrap()
6063 .into_iter()
6064 .map(|code_action| code_action.lsp_action.title().to_owned())
6065 .sorted()
6066 .collect::<Vec<_>>(),
6067 "Should receive code actions responses from all related servers with hover capabilities"
6068 );
6069}
6070
6071#[gpui::test]
6072async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6073 init_test(cx);
6074
6075 let fs = FakeFs::new(cx.executor());
6076 fs.insert_tree(
6077 "/dir",
6078 json!({
6079 "a.rs": "let a = 1;",
6080 "b.rs": "let b = 2;",
6081 "c.rs": "let c = 2;",
6082 }),
6083 )
6084 .await;
6085
6086 let project = Project::test(
6087 fs,
6088 [
6089 "/dir/a.rs".as_ref(),
6090 "/dir/b.rs".as_ref(),
6091 "/dir/c.rs".as_ref(),
6092 ],
6093 cx,
6094 )
6095 .await;
6096
6097 // check the initial state and get the worktrees
6098 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6099 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6100 assert_eq!(worktrees.len(), 3);
6101
6102 let worktree_a = worktrees[0].read(cx);
6103 let worktree_b = worktrees[1].read(cx);
6104 let worktree_c = worktrees[2].read(cx);
6105
6106 // check they start in the right order
6107 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6108 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6109 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6110
6111 (
6112 worktrees[0].clone(),
6113 worktrees[1].clone(),
6114 worktrees[2].clone(),
6115 )
6116 });
6117
6118 // move first worktree to after the second
6119 // [a, b, c] -> [b, a, c]
6120 project
6121 .update(cx, |project, cx| {
6122 let first = worktree_a.read(cx);
6123 let second = worktree_b.read(cx);
6124 project.move_worktree(first.id(), second.id(), cx)
6125 })
6126 .expect("moving first after second");
6127
6128 // check the state after moving
6129 project.update(cx, |project, cx| {
6130 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6131 assert_eq!(worktrees.len(), 3);
6132
6133 let first = worktrees[0].read(cx);
6134 let second = worktrees[1].read(cx);
6135 let third = worktrees[2].read(cx);
6136
6137 // check they are now in the right order
6138 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6139 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6140 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6141 });
6142
6143 // move the second worktree to before the first
6144 // [b, a, c] -> [a, b, c]
6145 project
6146 .update(cx, |project, cx| {
6147 let second = worktree_a.read(cx);
6148 let first = worktree_b.read(cx);
6149 project.move_worktree(first.id(), second.id(), cx)
6150 })
6151 .expect("moving second before first");
6152
6153 // check the state after moving
6154 project.update(cx, |project, cx| {
6155 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6156 assert_eq!(worktrees.len(), 3);
6157
6158 let first = worktrees[0].read(cx);
6159 let second = worktrees[1].read(cx);
6160 let third = worktrees[2].read(cx);
6161
6162 // check they are now in the right order
6163 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6164 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6165 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6166 });
6167
6168 // move the second worktree to after the third
6169 // [a, b, c] -> [a, c, b]
6170 project
6171 .update(cx, |project, cx| {
6172 let second = worktree_b.read(cx);
6173 let third = worktree_c.read(cx);
6174 project.move_worktree(second.id(), third.id(), cx)
6175 })
6176 .expect("moving second after third");
6177
6178 // check the state after moving
6179 project.update(cx, |project, cx| {
6180 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6181 assert_eq!(worktrees.len(), 3);
6182
6183 let first = worktrees[0].read(cx);
6184 let second = worktrees[1].read(cx);
6185 let third = worktrees[2].read(cx);
6186
6187 // check they are now in the right order
6188 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6189 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6190 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6191 });
6192
6193 // move the third worktree to before the second
6194 // [a, c, b] -> [a, b, c]
6195 project
6196 .update(cx, |project, cx| {
6197 let third = worktree_c.read(cx);
6198 let second = worktree_b.read(cx);
6199 project.move_worktree(third.id(), second.id(), cx)
6200 })
6201 .expect("moving third before second");
6202
6203 // check the state after moving
6204 project.update(cx, |project, cx| {
6205 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6206 assert_eq!(worktrees.len(), 3);
6207
6208 let first = worktrees[0].read(cx);
6209 let second = worktrees[1].read(cx);
6210 let third = worktrees[2].read(cx);
6211
6212 // check they are now in the right order
6213 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6214 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6215 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6216 });
6217
6218 // move the first worktree to after the third
6219 // [a, b, c] -> [b, c, a]
6220 project
6221 .update(cx, |project, cx| {
6222 let first = worktree_a.read(cx);
6223 let third = worktree_c.read(cx);
6224 project.move_worktree(first.id(), third.id(), cx)
6225 })
6226 .expect("moving first after third");
6227
6228 // check the state after moving
6229 project.update(cx, |project, cx| {
6230 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6231 assert_eq!(worktrees.len(), 3);
6232
6233 let first = worktrees[0].read(cx);
6234 let second = worktrees[1].read(cx);
6235 let third = worktrees[2].read(cx);
6236
6237 // check they are now in the right order
6238 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6239 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6240 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6241 });
6242
6243 // move the third worktree to before the first
6244 // [b, c, a] -> [a, b, c]
6245 project
6246 .update(cx, |project, cx| {
6247 let third = worktree_a.read(cx);
6248 let first = worktree_b.read(cx);
6249 project.move_worktree(third.id(), first.id(), cx)
6250 })
6251 .expect("moving third before first");
6252
6253 // check the state after moving
6254 project.update(cx, |project, cx| {
6255 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6256 assert_eq!(worktrees.len(), 3);
6257
6258 let first = worktrees[0].read(cx);
6259 let second = worktrees[1].read(cx);
6260 let third = worktrees[2].read(cx);
6261
6262 // check they are now in the right order
6263 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6264 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6265 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6266 });
6267}
6268
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an unstaged diff (buffer contents vs. the git index)
    // produces the expected hunks, and is recomputed when the index changes.
    init_test(cx);

    // Index (staged) contents: no comment line, "hello world".
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy contents: added comment line, "goodbye world".
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Against the index, the buffer shows one added line and one modified line.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Change the index so that only the println line differs from the buffer.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    // The diff is recomputed against the new index contents.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6366
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an uncommitted diff (buffer contents vs. HEAD) tracks both
    // HEAD and the index: each hunk's secondary status reflects whether the
    // change is staged, and deleted files still yield a deletion hunk.
    init_test(cx);

    // HEAD contents.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index contents: the println change is staged...
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // ...while the comment line exists only in the working copy.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should carry the same language as the buffer.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment line is unstaged (has a secondary hunk); the println
    // modification is already staged in the index.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is not yet staged, so the hunk has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once staged, the deletion hunk no longer has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6544
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    // Covers the lifecycle of staging individual hunks: the optimistic
    // "pending" state, confirmation once the index write lands, rollback when
    // the index write fails, and two staging operations in flight at once.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so the emitted sequence can be asserted below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It also appears as optimistically staged at first.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6884
// Verifies that hunk staging state remains consistent when filesystem events
// for index writes are delivered late: pending "removal" markers must persist
// until the corresponding FS event arrives, and interleaved staging operations
// must not clobber each other. The fixed seeds pin RNG-dependent scheduling
// that previously exposed this race.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD contents: six lines; the working copy deletes "zero" and
    // capitalizes "two" and "four", producing three distinct hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index both hold the committed text, so all three hunks
    // start out unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. Because FS events are paused, the hunk must stay
    // in the SecondaryHunkRemovalPending state rather than flipping to staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    // Both staged hunks must now show as pending, and the first hunk's pending
    // state must not have been lost.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7077
// Randomized test: repeatedly stages/unstages random hunks with random delays
// in between, then checks that every hunk's final secondary status matches
// what the last operation on it requested. Half the runs deprioritize the
// diff-recalculation task to widen the race window between diff recalculation
// and index writes.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of stage/unstage operations; overridable via the OPERATIONS env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.gen_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines, with every 5th line modified in the buffer — yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of expected state: each operation below
    // records the pending status it should produce on the local copy.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.gen_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, &[hunk.clone()], &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Random yields let index writes and diff recalculations interleave
        // with subsequent operations.
        for _ in 0..rng.gen_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending state should have resolved to its
    // corresponding final state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    // Compare the actual hunk states against the model.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7195
// Verifies that uncommitted diffs work when the project root is a single file
// (rather than a directory containing the repository): the repository lives
// above the worktree root, and the diff must still resolve HEAD/index content.
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents.clone())],
    );

    // Open the project rooted at the file itself, not at the repo root.
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    // Expect a single modified hunk (unstaged, since index matches HEAD).
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks(&snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text_string().unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                DiffHunkStatus {
                    kind: DiffHunkStatusKind::Modified,
                    secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
                },
            )],
        );
    });
}
7268
// Verifies that project paths resolve to the innermost containing repository
// (nested repos shadow their parent), that paths outside any repo resolve to
// None, and that removing a repository's .git directory invalidates lookups.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // Layout: /root has no repo; /root/dir1 is a repo; /root/dir1/deps/dep1 is
    // a nested repo inside it.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project path, expected (repo work dir, repo-relative path)) pairs;
        // None means the path is in no repository.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::from(repo_path))
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, Path::new(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Deleting the outer repo's .git directory should make its files resolve
    // to no repository.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, Path::new("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
7357
// Verifies the special-casing of a git repository rooted at the user's home
// directory: it is ignored when the worktree is a subdirectory of home, but
// honored when the worktree root *is* the home directory itself.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "home": {
                ".git": {},
                "project": {
                    "a.txt": "A"
                },
            },
        }),
    )
    .await;
    fs.set_home_dir(Path::new(path!("/root/home")).to_owned());

    // Worktree rooted *inside* home: the home-dir repo should not apply.
    let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
        assert!(containing.is_none());
    });

    // Worktree rooted *at* home: the repo should now be detected.
    let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            Path::new(path!("/root/home"))
        );
    });
}
7414
// End-to-end status test against a real git repository on the real filesystem:
// checks the initial modified/untracked/deleted statuses, then that statuses
// update after working-copy edits, commits, and file deletions.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + real git below, so parking (blocking) must be allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; it should now show as modified.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modifications and the index-removal of d.txt, clearing
    // their statuses.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked and one untracked file from the working copy.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7540
// Verifies status post-processing rules: a nested git repository does not
// appear in the outer repo's statuses, and a file deleted in the index but
// present in the working copy reports a combined Deleted-in-index /
// Added-in-worktree ("DA") status.
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + real git below, so parking (blocking) must be allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // Pick the outer ("project") repository, not the nested one in `sub`.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
7602
// Verifies git status reporting when the worktree root is a subfolder of a
// repository: statuses for files inside the worktree are still resolved
// against the enclosing repo, and they clear once the repo's status does.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Paths are repository-relative (rooted at /root/my-repo).
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT.as_ref(), FileStatus::Untracked)],
    );

    // Open the project two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            FileStatus::Untracked
        );
    });

    // Clear the repo's status; the previously-untracked file should have no
    // status once scans complete.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(repository.status_for_path(&E_TXT.into()), None);
    });
}
7679
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE(review): `#[cfg(any())]` always evaluates to false, so this test is
// currently compiled out entirely; remove the attribute to re-enable it.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + real git below, so parking (blocking) must be allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create conflicting edits to a.txt on two branches, then cherry-pick one
    // onto the other to produce a conflicted CHERRY_PICK state.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();
    // The repository model should report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After resolving, the conflict list should be empty.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
7760
// Verifies that rewriting .gitignore updates per-entry ignored state and git
// status: a formerly-ignored file becomes trackable (and stageable), while a
// formerly-tracked file becomes ignored.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Now a.xml is ignored and b.txt shows as newly added (staged).
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
7827
7828// NOTE:
7829// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
7830// a directory which some program has already open.
// This is a limitation of Windows.
7832// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// Verifies that renaming a repository's work directory on disk updates the
// repository's recorded work-directory path while preserving per-file
// statuses. Ignored on Windows (open directories cannot be renamed there).
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + real git below, so parking (blocking) must be allowed.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // "a" is tracked then modified; "b" is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should follow the rename, and statuses should survive it.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
7907
7908// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
7909// you can't rename a directory which some program has already open. This is a
// limitation of Windows. See:
7911// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
7912#[gpui::test]
7913#[cfg_attr(target_os = "windows", ignore)]
7914async fn test_file_status(cx: &mut gpui::TestAppContext) {
7915 init_test(cx);
7916 cx.executor().allow_parking();
7917 const IGNORE_RULE: &str = "**/target";
7918
7919 let root = TempTree::new(json!({
7920 "project": {
7921 "a.txt": "a",
7922 "b.txt": "bb",
7923 "c": {
7924 "d": {
7925 "e.txt": "eee"
7926 }
7927 },
7928 "f.txt": "ffff",
7929 "target": {
7930 "build_file": "???"
7931 },
7932 ".gitignore": IGNORE_RULE
7933 },
7934
7935 }));
7936 let root_path = root.path();
7937
7938 const A_TXT: &str = "a.txt";
7939 const B_TXT: &str = "b.txt";
7940 const E_TXT: &str = "c/d/e.txt";
7941 const F_TXT: &str = "f.txt";
7942 const DOTGITIGNORE: &str = ".gitignore";
7943 const BUILD_FILE: &str = "target/build_file";
7944
7945 // Set up git repository before creating the worktree.
7946 let work_dir = root.path().join("project");
7947 let mut repo = git_init(work_dir.as_path());
7948 repo.add_ignore_rule(IGNORE_RULE).unwrap();
7949 git_add(A_TXT, &repo);
7950 git_add(E_TXT, &repo);
7951 git_add(DOTGITIGNORE, &repo);
7952 git_commit("Initial commit", &repo);
7953
7954 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
7955
7956 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7957 tree.flush_fs_events(cx).await;
7958 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7959 .await;
7960 cx.executor().run_until_parked();
7961
7962 let repository = project.read_with(cx, |project, cx| {
7963 project.repositories(cx).values().next().unwrap().clone()
7964 });
7965
7966 // Check that the right git state is observed on startup
7967 repository.read_with(cx, |repository, _cx| {
7968 assert_eq!(
7969 repository.work_directory_abs_path.as_ref(),
7970 root_path.join("project").as_path()
7971 );
7972
7973 assert_eq!(
7974 repository.status_for_path(&B_TXT.into()).unwrap().status,
7975 FileStatus::Untracked,
7976 );
7977 assert_eq!(
7978 repository.status_for_path(&F_TXT.into()).unwrap().status,
7979 FileStatus::Untracked,
7980 );
7981 });
7982
7983 // Modify a file in the working copy.
7984 std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
7985 tree.flush_fs_events(cx).await;
7986 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7987 .await;
7988 cx.executor().run_until_parked();
7989
7990 // The worktree detects that the file's git status has changed.
7991 repository.read_with(cx, |repository, _| {
7992 assert_eq!(
7993 repository.status_for_path(&A_TXT.into()).unwrap().status,
7994 StatusCode::Modified.worktree(),
7995 );
7996 });
7997
7998 // Create a commit in the git repository.
7999 git_add(A_TXT, &repo);
8000 git_add(B_TXT, &repo);
8001 git_commit("Committing modified and added", &repo);
8002 tree.flush_fs_events(cx).await;
8003 cx.executor().run_until_parked();
8004
8005 // The worktree detects that the files' git status have changed.
8006 repository.read_with(cx, |repository, _cx| {
8007 assert_eq!(
8008 repository.status_for_path(&F_TXT.into()).unwrap().status,
8009 FileStatus::Untracked,
8010 );
8011 assert_eq!(repository.status_for_path(&B_TXT.into()), None);
8012 assert_eq!(repository.status_for_path(&A_TXT.into()), None);
8013 });
8014
8015 // Modify files in the working copy and perform git operations on other files.
8016 git_reset(0, &repo);
8017 git_remove_index(Path::new(B_TXT), &repo);
8018 git_stash(&mut repo);
8019 std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
8020 std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
8021 tree.flush_fs_events(cx).await;
8022 cx.executor().run_until_parked();
8023
8024 // Check that more complex repo changes are tracked
8025 repository.read_with(cx, |repository, _cx| {
8026 assert_eq!(repository.status_for_path(&A_TXT.into()), None);
8027 assert_eq!(
8028 repository.status_for_path(&B_TXT.into()).unwrap().status,
8029 FileStatus::Untracked,
8030 );
8031 assert_eq!(
8032 repository.status_for_path(&E_TXT.into()).unwrap().status,
8033 StatusCode::Modified.worktree(),
8034 );
8035 });
8036
8037 std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
8038 std::fs::remove_dir_all(work_dir.join("c")).unwrap();
8039 std::fs::write(
8040 work_dir.join(DOTGITIGNORE),
8041 [IGNORE_RULE, "f.txt"].join("\n"),
8042 )
8043 .unwrap();
8044
8045 git_add(Path::new(DOTGITIGNORE), &repo);
8046 git_commit("Committing modified git ignore", &repo);
8047
8048 tree.flush_fs_events(cx).await;
8049 cx.executor().run_until_parked();
8050
8051 let mut renamed_dir_name = "first_directory/second_directory";
8052 const RENAMED_FILE: &str = "rf.txt";
8053
8054 std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
8055 std::fs::write(
8056 work_dir.join(renamed_dir_name).join(RENAMED_FILE),
8057 "new-contents",
8058 )
8059 .unwrap();
8060
8061 tree.flush_fs_events(cx).await;
8062 cx.executor().run_until_parked();
8063
8064 repository.read_with(cx, |repository, _cx| {
8065 assert_eq!(
8066 repository
8067 .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
8068 .unwrap()
8069 .status,
8070 FileStatus::Untracked,
8071 );
8072 });
8073
8074 renamed_dir_name = "new_first_directory/second_directory";
8075
8076 std::fs::rename(
8077 work_dir.join("first_directory"),
8078 work_dir.join("new_first_directory"),
8079 )
8080 .unwrap();
8081
8082 tree.flush_fs_events(cx).await;
8083 cx.executor().run_until_parked();
8084
8085 repository.read_with(cx, |repository, _cx| {
8086 assert_eq!(
8087 repository
8088 .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
8089 .unwrap()
8090 .status,
8091 FileStatus::Untracked,
8092 );
8093 });
8094}
8095
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that creating an invisible (non-project) worktree does not cause
    // repositories from its ancestor directories to be reported by the project.
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Only /root/dir1/dep1 is opened as a visible worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    visible_worktree
        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;

    // Initially, only the visible worktree's repository is known.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Open a single file outside the visible worktree; this creates an
    // invisible worktree rooted at that file.
    let (invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    invisible_worktree
        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;

    // The invisible worktree must not surface /root/dir1's repository.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8157
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies that git index status and ignore state stay correct as new files
    // appear in tracked, ancestor-ignored, and repo-ignored locations.
    init_test(cx);
    // Disable file-scan exclusions so ignored paths still get worktree entries.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // The outer .gitignore lives above the repository root ("ancestor" ignore);
    // the inner one ignores "ignored-dir" within the repository.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ignored directories are not fully scanned by default, so force their
    // entries to be loaded before asserting on them.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: tracked file is clean, ancestor-ignored and repo-ignored
    // files have no status, and only the latter is marked is_ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new tracked file and stage it in the index, plus new files in
    // the ancestor-ignored and repo-ignored locations.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    // The staged file shows as Added; ignored files still have no status, and
    // the .git directory itself is treated as ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8292
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Verifies that linked git worktrees (gitdir files pointing into
    // .git/worktrees) and submodules (gitdir files pointing into .git/modules)
    // are each discovered as separate repositories and refreshed on git events.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| {
        project
            .worktrees(cx)
            .next()
            .unwrap()
            .read(cx)
            .as_local()
            .unwrap()
            .scan_complete()
    });
    scan_complete.await;

    // All three repositories — main, linked worktree, submodule — are found.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, not the
    // outer /project repository.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        // Barrier ensures pending repository updates are processed before the
        // status assertion below.
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // HEAD/index say "b" but the file on disk is "B", so it reads as modified.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
8451
8452#[gpui::test]
8453async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
8454 init_test(cx);
8455 let fs = FakeFs::new(cx.background_executor.clone());
8456 fs.insert_tree(
8457 path!("/root"),
8458 json!({
8459 "project": {
8460 ".git": {},
8461 "child1": {
8462 "a.txt": "A",
8463 },
8464 "child2": {
8465 "b.txt": "B",
8466 }
8467 }
8468 }),
8469 )
8470 .await;
8471
8472 let project = Project::test(
8473 fs.clone(),
8474 [
8475 path!("/root/project/child1").as_ref(),
8476 path!("/root/project/child2").as_ref(),
8477 ],
8478 cx,
8479 )
8480 .await;
8481
8482 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8483 tree.flush_fs_events(cx).await;
8484 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
8485 .await;
8486 cx.executor().run_until_parked();
8487
8488 let repos = project.read_with(cx, |project, cx| {
8489 project
8490 .repositories(cx)
8491 .values()
8492 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
8493 .collect::<Vec<_>>()
8494 });
8495 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
8496}
8497
8498async fn search(
8499 project: &Entity<Project>,
8500 query: SearchQuery,
8501 cx: &mut gpui::TestAppContext,
8502) -> Result<HashMap<String, Vec<Range<usize>>>> {
8503 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8504 let mut results = HashMap::default();
8505 while let Ok(search_result) = search_rx.recv().await {
8506 match search_result {
8507 SearchResult::Buffer { buffer, ranges } => {
8508 results.entry(buffer).or_insert(ranges);
8509 }
8510 SearchResult::LimitReached => {}
8511 }
8512 }
8513 Ok(results
8514 .into_iter()
8515 .map(|(buffer, ranges)| {
8516 buffer.update(cx, |buffer, cx| {
8517 let path = buffer
8518 .file()
8519 .unwrap()
8520 .full_path(cx)
8521 .to_string_lossy()
8522 .to_string();
8523 let ranges = ranges
8524 .into_iter()
8525 .map(|range| range.to_offset(buffer))
8526 .collect::<Vec<_>>();
8527 (path, ranges)
8528 })
8529 })
8530 .collect())
8531}
8532
8533pub fn init_test(cx: &mut gpui::TestAppContext) {
8534 if std::env::var("RUST_LOG").is_ok() {
8535 env_logger::try_init().ok();
8536 }
8537
8538 cx.update(|cx| {
8539 let settings_store = SettingsStore::test(cx);
8540 cx.set_global(settings_store);
8541 release_channel::init(SemanticVersion::default(), cx);
8542 language::init(cx);
8543 Project::init_settings(cx);
8544 });
8545}
8546
8547fn json_lang() -> Arc<Language> {
8548 Arc::new(Language::new(
8549 LanguageConfig {
8550 name: "JSON".into(),
8551 matcher: LanguageMatcher {
8552 path_suffixes: vec!["json".to_string()],
8553 ..Default::default()
8554 },
8555 ..Default::default()
8556 },
8557 None,
8558 ))
8559}
8560
8561fn js_lang() -> Arc<Language> {
8562 Arc::new(Language::new(
8563 LanguageConfig {
8564 name: "JavaScript".into(),
8565 matcher: LanguageMatcher {
8566 path_suffixes: vec!["js".to_string()],
8567 ..Default::default()
8568 },
8569 ..Default::default()
8570 },
8571 None,
8572 ))
8573}
8574
8575fn rust_lang() -> Arc<Language> {
8576 Arc::new(Language::new(
8577 LanguageConfig {
8578 name: "Rust".into(),
8579 matcher: LanguageMatcher {
8580 path_suffixes: vec!["rs".to_string()],
8581 ..Default::default()
8582 },
8583 ..Default::default()
8584 },
8585 Some(tree_sitter_rust::LANGUAGE.into()),
8586 ))
8587}
8588
8589fn typescript_lang() -> Arc<Language> {
8590 Arc::new(Language::new(
8591 LanguageConfig {
8592 name: "TypeScript".into(),
8593 matcher: LanguageMatcher {
8594 path_suffixes: vec!["ts".to_string()],
8595 ..Default::default()
8596 },
8597 ..Default::default()
8598 },
8599 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8600 ))
8601}
8602
8603fn tsx_lang() -> Arc<Language> {
8604 Arc::new(Language::new(
8605 LanguageConfig {
8606 name: "tsx".into(),
8607 matcher: LanguageMatcher {
8608 path_suffixes: vec!["tsx".to_string()],
8609 ..Default::default()
8610 },
8611 ..Default::default()
8612 },
8613 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8614 ))
8615}
8616
8617fn get_all_tasks(
8618 project: &Entity<Project>,
8619 task_contexts: &TaskContexts,
8620 cx: &mut App,
8621) -> Vec<(TaskSourceKind, ResolvedTask)> {
8622 let (mut old, new) = project.update(cx, |project, cx| {
8623 project
8624 .task_store
8625 .read(cx)
8626 .task_inventory()
8627 .unwrap()
8628 .read(cx)
8629 .used_and_current_resolved_tasks(task_contexts, cx)
8630 });
8631 old.extend(new);
8632 old
8633}
8634
8635#[track_caller]
8636fn assert_entry_git_state(
8637 tree: &Worktree,
8638 repository: &Repository,
8639 path: &str,
8640 index_status: Option<StatusCode>,
8641 is_ignored: bool,
8642) {
8643 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
8644 let entry = tree
8645 .entry_for_path(path)
8646 .unwrap_or_else(|| panic!("entry {path} not found"));
8647 let status = repository
8648 .status_for_path(&path.into())
8649 .map(|entry| entry.status);
8650 let expected = index_status.map(|index_status| {
8651 TrackedStatus {
8652 index_status,
8653 worktree_status: StatusCode::Unmodified,
8654 }
8655 .into()
8656 });
8657 assert_eq!(
8658 status, expected,
8659 "expected {path} to have git status: {expected:?}"
8660 );
8661 assert_eq!(
8662 entry.is_ignored, is_ignored,
8663 "expected {path} to have is_ignored: {is_ignored}"
8664 );
8665}
8666
8667#[track_caller]
8668fn git_init(path: &Path) -> git2::Repository {
8669 let mut init_opts = RepositoryInitOptions::new();
8670 init_opts.initial_head("main");
8671 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
8672}
8673
8674#[track_caller]
8675fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
8676 let path = path.as_ref();
8677 let mut index = repo.index().expect("Failed to get index");
8678 index.add_path(path).expect("Failed to add file");
8679 index.write().expect("Failed to write index");
8680}
8681
8682#[track_caller]
8683fn git_remove_index(path: &Path, repo: &git2::Repository) {
8684 let mut index = repo.index().expect("Failed to get index");
8685 index.remove_path(path).expect("Failed to add file");
8686 index.write().expect("Failed to write index");
8687}
8688
8689#[track_caller]
8690fn git_commit(msg: &'static str, repo: &git2::Repository) {
8691 use git2::Signature;
8692
8693 let signature = Signature::now("test", "test@zed.dev").unwrap();
8694 let oid = repo.index().unwrap().write_tree().unwrap();
8695 let tree = repo.find_tree(oid).unwrap();
8696 if let Ok(head) = repo.head() {
8697 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
8698
8699 let parent_commit = parent_obj.as_commit().unwrap();
8700
8701 repo.commit(
8702 Some("HEAD"),
8703 &signature,
8704 &signature,
8705 msg,
8706 &tree,
8707 &[parent_commit],
8708 )
8709 .expect("Failed to commit with parent");
8710 } else {
8711 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
8712 .expect("Failed to commit");
8713 }
8714}
8715
// Currently compiled out; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    // Apply `commit` onto the current HEAD with default options.
    repo.cherrypick(commit, None)
        .expect("Failed to cherrypick");
}
8721
8722#[track_caller]
8723fn git_stash(repo: &mut git2::Repository) {
8724 use git2::Signature;
8725
8726 let signature = Signature::now("test", "test@zed.dev").unwrap();
8727 repo.stash_save(&signature, "N/A", None)
8728 .expect("Failed to stash");
8729}
8730
8731#[track_caller]
8732fn git_reset(offset: usize, repo: &git2::Repository) {
8733 let head = repo.head().expect("Couldn't get repo head");
8734 let object = head.peel(git2::ObjectType::Commit).unwrap();
8735 let commit = object.as_commit().unwrap();
8736 let new_head = commit
8737 .parents()
8738 .inspect(|parnet| {
8739 parnet.message();
8740 })
8741 .nth(offset)
8742 .expect("Not enough history");
8743 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
8744 .expect("Could not reset");
8745}
8746
// Currently compiled out; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    // Create branch `name` pointing at the current HEAD commit.
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-pasted panic message: this creates a branch, not a commit.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
8757
// Currently compiled out; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    // Point HEAD at `name`, then sync the working tree to match it.
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
8764
// Currently compiled out; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    // Snapshot of path -> status for every entry git reports.
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
8774
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Open both directories as separate worktrees of the same project.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    for worktree in project.read_with(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>()) {
        worktree
            .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
            .await;
    }
    cx.run_until_parked();

    // Capture each worktree's absolute root and id so assertions below can
    // check which worktree a resolved path belongs to.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at a worktree root resolves to that worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        // Nested files resolve with a worktree-relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        // Files in the second worktree resolve to the second worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // Resolution is purely path-based: a file need not exist on disk as
        // long as it falls under a worktree root.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}