1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind, assert_hunks,
9};
10use fs::FakeFs;
11use futures::{StreamExt, future};
12use git::{
13 repository::RepoPath,
14 status::{StatusCode, TrackedStatus},
15};
16use git2::RepositoryInitOptions;
17use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
18use http_client::Url;
19use language::{
20 Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
21 LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
22 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
23 tree_sitter_rust, tree_sitter_typescript,
24};
25use lsp::{
26 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
27 WillRenameFiles, notification::DidRenameFiles,
28};
29use parking_lot::Mutex;
30use paths::{config_dir, tasks_file};
31use postage::stream::Stream as _;
32use pretty_assertions::{assert_eq, assert_matches};
33use serde_json::json;
34#[cfg(not(windows))]
35use std::os;
36use std::{mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
37use task::{ResolvedTask, TaskContext};
38use unindent::Unindent as _;
39use util::{
40 TryFutureExt as _, assert_set_eq, path,
41 paths::PathMatcher,
42 separator,
43 test::{TempTree, marked_text_offsets},
44 uri,
45};
46use worktree::WorktreeModelHandle as _;
47
48#[gpui::test]
49async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
50 cx.executor().allow_parking();
51
52 let (tx, mut rx) = futures::channel::mpsc::unbounded();
53 let _thread = std::thread::spawn(move || {
54 #[cfg(not(target_os = "windows"))]
55 std::fs::metadata("/tmp").unwrap();
56 #[cfg(target_os = "windows")]
57 std::fs::metadata("C:/Windows").unwrap();
58 std::thread::sleep(Duration::from_millis(1000));
59 tx.unbounded_send(1).unwrap();
60 });
61 rx.next().await.unwrap();
62}
63
64#[gpui::test]
65async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
66 cx.executor().allow_parking();
67
68 let io_task = smol::unblock(move || {
69 println!("sleeping on thread {:?}", std::thread::current().id());
70 std::thread::sleep(Duration::from_millis(10));
71 1
72 });
73
74 let task = cx.foreground_executor().spawn(async move {
75 io_task.await;
76 });
77
78 task.await;
79}
80
81#[cfg(not(windows))]
82#[gpui::test]
83async fn test_symlinks(cx: &mut gpui::TestAppContext) {
84 init_test(cx);
85 cx.executor().allow_parking();
86
87 let dir = TempTree::new(json!({
88 "root": {
89 "apple": "",
90 "banana": {
91 "carrot": {
92 "date": "",
93 "endive": "",
94 }
95 },
96 "fennel": {
97 "grape": "",
98 }
99 }
100 }));
101
102 let root_link_path = dir.path().join("root_link");
103 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
104 os::unix::fs::symlink(
105 dir.path().join("root/fennel"),
106 dir.path().join("root/finnochio"),
107 )
108 .unwrap();
109
110 let project = Project::test(
111 Arc::new(RealFs::new(None, cx.executor())),
112 [root_link_path.as_ref()],
113 cx,
114 )
115 .await;
116
117 project.update(cx, |project, cx| {
118 let tree = project.worktrees(cx).next().unwrap().read(cx);
119 assert_eq!(tree.file_count(), 5);
120 assert_eq!(
121 tree.inode_for_path("fennel/grape"),
122 tree.inode_for_path("finnochio/grape")
123 );
124 });
125}
126
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    // Mirror the real temp directory into the fake FS so the project sees it.
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path,
        // blocking on language detection so the assertions below are synchronous.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by the .editorconfig glob "*.rs",
        // so it falls back to the .zed/settings tab_size of 8.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
216
// Verifies that per-directory .zed/settings.json and .zed/tasks.json files are
// discovered and layered correctly, and that global (file-based) tasks merge
// with worktree tasks, with the most recently scheduled task sorting first.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // Source kind for tasks defined in the root-level .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // The file in a/ sees only the root settings; the file in b/ sees
            // the nested b/.zed/settings.json override.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files contribute; the expected order places the
    // nested b/.zed task ahead of the root .zed task.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root .zed task as recently scheduled, and add a global task
    // file entry on top of the worktree task sources.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                    settings::TaskKind::Script,
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled root task now sorts first, followed by the other
    // worktree task, then the newly added global task with its env.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
422
423#[gpui::test]
424async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
425 init_test(cx);
426 TaskStore::init(None);
427
428 let fs = FakeFs::new(cx.executor());
429 fs.insert_tree(
430 path!("/dir"),
431 json!({
432 ".zed": {
433 "tasks.json": r#"[{
434 "label": "test worktree root",
435 "command": "echo $ZED_WORKTREE_ROOT"
436 }]"#,
437 },
438 "a": {
439 "a.rs": "fn a() {\n A\n}"
440 },
441 }),
442 )
443 .await;
444
445 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
446 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
447
448 cx.executor().run_until_parked();
449 let worktree_id = cx.update(|cx| {
450 project.update(cx, |project, cx| {
451 project.worktrees(cx).next().unwrap().read(cx).id()
452 })
453 });
454
455 let active_non_worktree_item_tasks = cx.update(|cx| {
456 get_all_tasks(
457 &project,
458 &TaskContexts {
459 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
460 active_worktree_context: None,
461 other_worktree_contexts: Vec::new(),
462 },
463 cx,
464 )
465 });
466 assert!(
467 active_non_worktree_item_tasks.is_empty(),
468 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
469 );
470
471 let active_worktree_tasks = cx.update(|cx| {
472 get_all_tasks(
473 &project,
474 &TaskContexts {
475 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
476 active_worktree_context: Some((worktree_id, {
477 let mut worktree_context = TaskContext::default();
478 worktree_context
479 .task_variables
480 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
481 worktree_context
482 })),
483 other_worktree_contexts: Vec::new(),
484 },
485 cx,
486 )
487 });
488 assert_eq!(
489 active_worktree_tasks
490 .into_iter()
491 .map(|(source_kind, task)| {
492 let resolved = task.resolved.unwrap();
493 (source_kind, resolved.command)
494 })
495 .collect::<Vec<_>>(),
496 vec![(
497 TaskSourceKind::Worktree {
498 id: worktree_id,
499 directory_in_worktree: PathBuf::from(separator!(".zed")),
500 id_base: if cfg!(windows) {
501 "local worktree tasks from directory \".zed\"".into()
502 } else {
503 "local worktree tasks from directory \".zed\"".into()
504 },
505 },
506 "echo /dir".to_string(),
507 )]
508 );
509}
510
// End-to-end check of language-server lifecycle management: servers start
// lazily per language, receive open/change/save/close notifications only for
// buffers of their language, track files across renames (including renames
// that change the file's language), and reopen documents after a restart.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake servers with distinct completion triggers, so we can tell from
    // a buffer's configuration which server it was wired up to.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so no triggers were configured.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Attach a diagnostic to the renamed buffer so we can observe it being
    // cleared when the buffer's language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the
    // replacements come up.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
912
913#[gpui::test]
914async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
915 init_test(cx);
916
917 let fs = FakeFs::new(cx.executor());
918 fs.insert_tree(
919 path!("/the-root"),
920 json!({
921 ".gitignore": "target\n",
922 "Cargo.lock": "",
923 "src": {
924 "a.rs": "",
925 "b.rs": "",
926 },
927 "target": {
928 "x": {
929 "out": {
930 "x.rs": ""
931 }
932 },
933 "y": {
934 "out": {
935 "y.rs": "",
936 }
937 },
938 "z": {
939 "out": {
940 "z.rs": ""
941 }
942 }
943 }
944 }),
945 )
946 .await;
947 fs.insert_tree(
948 path!("/the-registry"),
949 json!({
950 "dep1": {
951 "src": {
952 "dep1.rs": "",
953 }
954 },
955 "dep2": {
956 "src": {
957 "dep2.rs": "",
958 }
959 },
960 }),
961 )
962 .await;
963 fs.insert_tree(
964 path!("/the/stdlib"),
965 json!({
966 "LICENSE": "",
967 "src": {
968 "string.rs": "",
969 }
970 }),
971 )
972 .await;
973
974 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
975 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
976 (project.languages().clone(), project.lsp_store())
977 });
978 language_registry.add(rust_lang());
979 let mut fake_servers = language_registry.register_fake_lsp(
980 "Rust",
981 FakeLspAdapter {
982 name: "the-language-server",
983 ..Default::default()
984 },
985 );
986
987 cx.executor().run_until_parked();
988
989 // Start the language server by opening a buffer with a compatible file extension.
990 project
991 .update(cx, |project, cx| {
992 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
993 })
994 .await
995 .unwrap();
996
997 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
998 project.update(cx, |project, cx| {
999 let worktree = project.worktrees(cx).next().unwrap();
1000 assert_eq!(
1001 worktree
1002 .read(cx)
1003 .snapshot()
1004 .entries(true, 0)
1005 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1006 .collect::<Vec<_>>(),
1007 &[
1008 (Path::new(""), false),
1009 (Path::new(".gitignore"), false),
1010 (Path::new("Cargo.lock"), false),
1011 (Path::new("src"), false),
1012 (Path::new("src/a.rs"), false),
1013 (Path::new("src/b.rs"), false),
1014 (Path::new("target"), true),
1015 ]
1016 );
1017 });
1018
1019 let prev_read_dir_count = fs.read_dir_call_count();
1020
1021 let fake_server = fake_servers.next().await.unwrap();
1022 let (server_id, server_name) = lsp_store.read_with(cx, |lsp_store, _| {
1023 let (id, status) = lsp_store.language_server_statuses().next().unwrap();
1024 (id, LanguageServerName::from(status.name.as_str()))
1025 });
1026
1027 // Simulate jumping to a definition in a dependency outside of the worktree.
1028 let _out_of_worktree_buffer = project
1029 .update(cx, |project, cx| {
1030 project.open_local_buffer_via_lsp(
1031 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1032 server_id,
1033 server_name.clone(),
1034 cx,
1035 )
1036 })
1037 .await
1038 .unwrap();
1039
1040 // Keep track of the FS events reported to the language server.
1041 let file_changes = Arc::new(Mutex::new(Vec::new()));
1042 fake_server
1043 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1044 registrations: vec![lsp::Registration {
1045 id: Default::default(),
1046 method: "workspace/didChangeWatchedFiles".to_string(),
1047 register_options: serde_json::to_value(
1048 lsp::DidChangeWatchedFilesRegistrationOptions {
1049 watchers: vec![
1050 lsp::FileSystemWatcher {
1051 glob_pattern: lsp::GlobPattern::String(
1052 path!("/the-root/Cargo.toml").to_string(),
1053 ),
1054 kind: None,
1055 },
1056 lsp::FileSystemWatcher {
1057 glob_pattern: lsp::GlobPattern::String(
1058 path!("/the-root/src/*.{rs,c}").to_string(),
1059 ),
1060 kind: None,
1061 },
1062 lsp::FileSystemWatcher {
1063 glob_pattern: lsp::GlobPattern::String(
1064 path!("/the-root/target/y/**/*.rs").to_string(),
1065 ),
1066 kind: None,
1067 },
1068 lsp::FileSystemWatcher {
1069 glob_pattern: lsp::GlobPattern::String(
1070 path!("/the/stdlib/src/**/*.rs").to_string(),
1071 ),
1072 kind: None,
1073 },
1074 lsp::FileSystemWatcher {
1075 glob_pattern: lsp::GlobPattern::String(
1076 path!("**/Cargo.lock").to_string(),
1077 ),
1078 kind: None,
1079 },
1080 ],
1081 },
1082 )
1083 .ok(),
1084 }],
1085 })
1086 .await
1087 .unwrap();
1088 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1089 let file_changes = file_changes.clone();
1090 move |params, _| {
1091 let mut file_changes = file_changes.lock();
1092 file_changes.extend(params.changes);
1093 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1094 }
1095 });
1096
1097 cx.executor().run_until_parked();
1098 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1099 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1100
1101 let mut new_watched_paths = fs.watched_paths();
1102 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1103 assert_eq!(
1104 &new_watched_paths,
1105 &[
1106 Path::new(path!("/the-root")),
1107 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1108 Path::new(path!("/the/stdlib/src"))
1109 ]
1110 );
1111
1112 // Now the language server has asked us to watch an ignored directory path,
1113 // so we recursively load it.
1114 project.update(cx, |project, cx| {
1115 let worktree = project.visible_worktrees(cx).next().unwrap();
1116 assert_eq!(
1117 worktree
1118 .read(cx)
1119 .snapshot()
1120 .entries(true, 0)
1121 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1122 .collect::<Vec<_>>(),
1123 &[
1124 (Path::new(""), false),
1125 (Path::new(".gitignore"), false),
1126 (Path::new("Cargo.lock"), false),
1127 (Path::new("src"), false),
1128 (Path::new("src/a.rs"), false),
1129 (Path::new("src/b.rs"), false),
1130 (Path::new("target"), true),
1131 (Path::new("target/x"), true),
1132 (Path::new("target/y"), true),
1133 (Path::new("target/y/out"), true),
1134 (Path::new("target/y/out/y.rs"), true),
1135 (Path::new("target/z"), true),
1136 ]
1137 );
1138 });
1139
1140 // Perform some file system mutations, two of which match the watched patterns,
1141 // and one of which does not.
1142 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1143 .await
1144 .unwrap();
1145 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1146 .await
1147 .unwrap();
1148 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1149 .await
1150 .unwrap();
1151 fs.create_file(
1152 path!("/the-root/target/x/out/x2.rs").as_ref(),
1153 Default::default(),
1154 )
1155 .await
1156 .unwrap();
1157 fs.create_file(
1158 path!("/the-root/target/y/out/y2.rs").as_ref(),
1159 Default::default(),
1160 )
1161 .await
1162 .unwrap();
1163 fs.save(
1164 path!("/the-root/Cargo.lock").as_ref(),
1165 &"".into(),
1166 Default::default(),
1167 )
1168 .await
1169 .unwrap();
1170 fs.save(
1171 path!("/the-stdlib/LICENSE").as_ref(),
1172 &"".into(),
1173 Default::default(),
1174 )
1175 .await
1176 .unwrap();
1177 fs.save(
1178 path!("/the/stdlib/src/string.rs").as_ref(),
1179 &"".into(),
1180 Default::default(),
1181 )
1182 .await
1183 .unwrap();
1184
1185 // The language server receives events for the FS mutations that match its watch patterns.
1186 cx.executor().run_until_parked();
1187 assert_eq!(
1188 &*file_changes.lock(),
1189 &[
1190 lsp::FileEvent {
1191 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1192 typ: lsp::FileChangeType::CHANGED,
1193 },
1194 lsp::FileEvent {
1195 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1196 typ: lsp::FileChangeType::DELETED,
1197 },
1198 lsp::FileEvent {
1199 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1200 typ: lsp::FileChangeType::CREATED,
1201 },
1202 lsp::FileEvent {
1203 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1204 typ: lsp::FileChangeType::CREATED,
1205 },
1206 lsp::FileEvent {
1207 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1208 typ: lsp::FileChangeType::CHANGED,
1209 },
1210 ]
1211 );
1212}
1213
1214#[gpui::test]
1215async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1216 init_test(cx);
1217
1218 let fs = FakeFs::new(cx.executor());
1219 fs.insert_tree(
1220 path!("/dir"),
1221 json!({
1222 "a.rs": "let a = 1;",
1223 "b.rs": "let b = 2;"
1224 }),
1225 )
1226 .await;
1227
1228 let project = Project::test(
1229 fs,
1230 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1231 cx,
1232 )
1233 .await;
1234 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1235
1236 let buffer_a = project
1237 .update(cx, |project, cx| {
1238 project.open_local_buffer(path!("/dir/a.rs"), cx)
1239 })
1240 .await
1241 .unwrap();
1242 let buffer_b = project
1243 .update(cx, |project, cx| {
1244 project.open_local_buffer(path!("/dir/b.rs"), cx)
1245 })
1246 .await
1247 .unwrap();
1248
1249 lsp_store.update(cx, |lsp_store, cx| {
1250 lsp_store
1251 .update_diagnostics(
1252 LanguageServerId(0),
1253 lsp::PublishDiagnosticsParams {
1254 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1255 version: None,
1256 diagnostics: vec![lsp::Diagnostic {
1257 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1258 severity: Some(lsp::DiagnosticSeverity::ERROR),
1259 message: "error 1".to_string(),
1260 ..Default::default()
1261 }],
1262 },
1263 &[],
1264 cx,
1265 )
1266 .unwrap();
1267 lsp_store
1268 .update_diagnostics(
1269 LanguageServerId(0),
1270 lsp::PublishDiagnosticsParams {
1271 uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
1272 version: None,
1273 diagnostics: vec![lsp::Diagnostic {
1274 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1275 severity: Some(DiagnosticSeverity::WARNING),
1276 message: "error 2".to_string(),
1277 ..Default::default()
1278 }],
1279 },
1280 &[],
1281 cx,
1282 )
1283 .unwrap();
1284 });
1285
1286 buffer_a.update(cx, |buffer, _| {
1287 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1288 assert_eq!(
1289 chunks
1290 .iter()
1291 .map(|(s, d)| (s.as_str(), *d))
1292 .collect::<Vec<_>>(),
1293 &[
1294 ("let ", None),
1295 ("a", Some(DiagnosticSeverity::ERROR)),
1296 (" = 1;", None),
1297 ]
1298 );
1299 });
1300 buffer_b.update(cx, |buffer, _| {
1301 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1302 assert_eq!(
1303 chunks
1304 .iter()
1305 .map(|(s, d)| (s.as_str(), *d))
1306 .collect::<Vec<_>>(),
1307 &[
1308 ("let ", None),
1309 ("b", Some(DiagnosticSeverity::WARNING)),
1310 (" = 2;", None),
1311 ]
1312 );
1313 });
1314}
1315
1316#[gpui::test]
1317async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1318 init_test(cx);
1319
1320 let fs = FakeFs::new(cx.executor());
1321 fs.insert_tree(
1322 path!("/root"),
1323 json!({
1324 "dir": {
1325 ".git": {
1326 "HEAD": "ref: refs/heads/main",
1327 },
1328 ".gitignore": "b.rs",
1329 "a.rs": "let a = 1;",
1330 "b.rs": "let b = 2;",
1331 },
1332 "other.rs": "let b = c;"
1333 }),
1334 )
1335 .await;
1336
1337 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1338 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1339 let (worktree, _) = project
1340 .update(cx, |project, cx| {
1341 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1342 })
1343 .await
1344 .unwrap();
1345 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1346
1347 let (worktree, _) = project
1348 .update(cx, |project, cx| {
1349 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1350 })
1351 .await
1352 .unwrap();
1353 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1354
1355 let server_id = LanguageServerId(0);
1356 lsp_store.update(cx, |lsp_store, cx| {
1357 lsp_store
1358 .update_diagnostics(
1359 server_id,
1360 lsp::PublishDiagnosticsParams {
1361 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1362 version: None,
1363 diagnostics: vec![lsp::Diagnostic {
1364 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1365 severity: Some(lsp::DiagnosticSeverity::ERROR),
1366 message: "unused variable 'b'".to_string(),
1367 ..Default::default()
1368 }],
1369 },
1370 &[],
1371 cx,
1372 )
1373 .unwrap();
1374 lsp_store
1375 .update_diagnostics(
1376 server_id,
1377 lsp::PublishDiagnosticsParams {
1378 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1379 version: None,
1380 diagnostics: vec![lsp::Diagnostic {
1381 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1382 severity: Some(lsp::DiagnosticSeverity::ERROR),
1383 message: "unknown variable 'c'".to_string(),
1384 ..Default::default()
1385 }],
1386 },
1387 &[],
1388 cx,
1389 )
1390 .unwrap();
1391 });
1392
1393 let main_ignored_buffer = project
1394 .update(cx, |project, cx| {
1395 project.open_buffer((main_worktree_id, "b.rs"), cx)
1396 })
1397 .await
1398 .unwrap();
1399 main_ignored_buffer.update(cx, |buffer, _| {
1400 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1401 assert_eq!(
1402 chunks
1403 .iter()
1404 .map(|(s, d)| (s.as_str(), *d))
1405 .collect::<Vec<_>>(),
1406 &[
1407 ("let ", None),
1408 ("b", Some(DiagnosticSeverity::ERROR)),
1409 (" = 2;", None),
1410 ],
1411 "Gigitnored buffers should still get in-buffer diagnostics",
1412 );
1413 });
1414 let other_buffer = project
1415 .update(cx, |project, cx| {
1416 project.open_buffer((other_worktree_id, ""), cx)
1417 })
1418 .await
1419 .unwrap();
1420 other_buffer.update(cx, |buffer, _| {
1421 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1422 assert_eq!(
1423 chunks
1424 .iter()
1425 .map(|(s, d)| (s.as_str(), *d))
1426 .collect::<Vec<_>>(),
1427 &[
1428 ("let b = ", None),
1429 ("c", Some(DiagnosticSeverity::ERROR)),
1430 (";", None),
1431 ],
1432 "Buffers from hidden projects should still get in-buffer diagnostics"
1433 );
1434 });
1435
1436 project.update(cx, |project, cx| {
1437 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1438 assert_eq!(
1439 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1440 vec![(
1441 ProjectPath {
1442 worktree_id: main_worktree_id,
1443 path: Arc::from(Path::new("b.rs")),
1444 },
1445 server_id,
1446 DiagnosticSummary {
1447 error_count: 1,
1448 warning_count: 0,
1449 }
1450 )]
1451 );
1452 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1453 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1454 });
1455}
1456
// Verifies the event sequence emitted while a language server performs
// disk-based diagnostics under its registered progress token: Started/Finished
// events bracket the diagnostic updates, and publishing already-empty
// diagnostics a second time produces no additional event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Register the fake server with a disk-based diagnostics progress token so
    // that `$/progress` notifications for it map to the DiskBasedDiagnostics events.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Subscribe to project events; the assertions below consume them in order.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the registered token (with a suffix) triggers the
    // DiskBasedDiagnosticsStarted event, after the RefreshInlayHints event
    // produced by server startup.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic while progress is running emits a per-path
    // DiagnosticsUpdated event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending progress on the token emits DiskBasedDiagnosticsFinished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is present in the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second empty publish must not emit anything: the event stream stays
    // pending after all pending work has run.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1592
// Restarting a language server while its disk-based diagnostics are still in
// progress: the old server's never-completed progress must not keep the
// project in the "diagnostics running" state once the replacement server
// finishes its own diagnostics pass.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The replacement server gets a new id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the replacement server (id 1) is reported as running diagnostics;
    // the stopped server (id 0) is gone despite never ending its progress.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1679
1680#[gpui::test]
1681async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1682 init_test(cx);
1683
1684 let fs = FakeFs::new(cx.executor());
1685 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
1686
1687 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1688
1689 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1690 language_registry.add(rust_lang());
1691 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1692
1693 let (buffer, _) = project
1694 .update(cx, |project, cx| {
1695 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1696 })
1697 .await
1698 .unwrap();
1699
1700 // Publish diagnostics
1701 let fake_server = fake_servers.next().await.unwrap();
1702 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1703 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1704 version: None,
1705 diagnostics: vec![lsp::Diagnostic {
1706 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1707 severity: Some(lsp::DiagnosticSeverity::ERROR),
1708 message: "the message".to_string(),
1709 ..Default::default()
1710 }],
1711 });
1712
1713 cx.executor().run_until_parked();
1714 buffer.update(cx, |buffer, _| {
1715 assert_eq!(
1716 buffer
1717 .snapshot()
1718 .diagnostics_in_range::<_, usize>(0..1, false)
1719 .map(|entry| entry.diagnostic.message.clone())
1720 .collect::<Vec<_>>(),
1721 ["the message".to_string()]
1722 );
1723 });
1724 project.update(cx, |project, cx| {
1725 assert_eq!(
1726 project.diagnostic_summary(false, cx),
1727 DiagnosticSummary {
1728 error_count: 1,
1729 warning_count: 0,
1730 }
1731 );
1732 });
1733
1734 project.update(cx, |project, cx| {
1735 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1736 });
1737
1738 // The diagnostics are cleared.
1739 cx.executor().run_until_parked();
1740 buffer.update(cx, |buffer, _| {
1741 assert_eq!(
1742 buffer
1743 .snapshot()
1744 .diagnostics_in_range::<_, usize>(0..1, false)
1745 .map(|entry| entry.diagnostic.message.clone())
1746 .collect::<Vec<_>>(),
1747 Vec::<String>::new(),
1748 );
1749 });
1750 project.update(cx, |project, cx| {
1751 assert_eq!(
1752 project.diagnostic_summary(false, cx),
1753 DiagnosticSummary {
1754 error_count: 0,
1755 warning_count: 0,
1756 }
1757 );
1758 });
1759}
1760
1761#[gpui::test]
1762async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1763 init_test(cx);
1764
1765 let fs = FakeFs::new(cx.executor());
1766 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1767
1768 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1769 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1770
1771 language_registry.add(rust_lang());
1772 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1773
1774 let (buffer, _handle) = project
1775 .update(cx, |project, cx| {
1776 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1777 })
1778 .await
1779 .unwrap();
1780
1781 // Before restarting the server, report diagnostics with an unknown buffer version.
1782 let fake_server = fake_servers.next().await.unwrap();
1783 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1784 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1785 version: Some(10000),
1786 diagnostics: Vec::new(),
1787 });
1788 cx.executor().run_until_parked();
1789 project.update(cx, |project, cx| {
1790 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1791 });
1792
1793 let mut fake_server = fake_servers.next().await.unwrap();
1794 let notification = fake_server
1795 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1796 .await
1797 .text_document;
1798 assert_eq!(notification.version, 0);
1799}
1800
// Cancelling language-server work should send a WorkDoneProgressCancel only
// for progress that was begun with `cancellable: Some(true)` — the
// non-cancellable token started first must receive no cancel notification.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    // Begin two pieces of work: one non-cancellable, one cancellable.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    // Request cancellation of all work for the buffer's server(s).
    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // The first (and only) cancel notification received must target the
    // cancellable token, not "another-token".
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1865
1866#[gpui::test]
1867async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1868 init_test(cx);
1869
1870 let fs = FakeFs::new(cx.executor());
1871 fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
1872 .await;
1873
1874 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1875 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1876
1877 let mut fake_rust_servers = language_registry.register_fake_lsp(
1878 "Rust",
1879 FakeLspAdapter {
1880 name: "rust-lsp",
1881 ..Default::default()
1882 },
1883 );
1884 let mut fake_js_servers = language_registry.register_fake_lsp(
1885 "JavaScript",
1886 FakeLspAdapter {
1887 name: "js-lsp",
1888 ..Default::default()
1889 },
1890 );
1891 language_registry.add(rust_lang());
1892 language_registry.add(js_lang());
1893
1894 let _rs_buffer = project
1895 .update(cx, |project, cx| {
1896 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1897 })
1898 .await
1899 .unwrap();
1900 let _js_buffer = project
1901 .update(cx, |project, cx| {
1902 project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
1903 })
1904 .await
1905 .unwrap();
1906
1907 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1908 assert_eq!(
1909 fake_rust_server_1
1910 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1911 .await
1912 .text_document
1913 .uri
1914 .as_str(),
1915 uri!("file:///dir/a.rs")
1916 );
1917
1918 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1919 assert_eq!(
1920 fake_js_server
1921 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1922 .await
1923 .text_document
1924 .uri
1925 .as_str(),
1926 uri!("file:///dir/b.js")
1927 );
1928
1929 // Disable Rust language server, ensuring only that server gets stopped.
1930 cx.update(|cx| {
1931 SettingsStore::update_global(cx, |settings, cx| {
1932 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1933 settings.languages.insert(
1934 "Rust".into(),
1935 LanguageSettingsContent {
1936 enable_language_server: Some(false),
1937 ..Default::default()
1938 },
1939 );
1940 });
1941 })
1942 });
1943 fake_rust_server_1
1944 .receive_notification::<lsp::notification::Exit>()
1945 .await;
1946
1947 // Enable Rust and disable JavaScript language servers, ensuring that the
1948 // former gets started again and that the latter stops.
1949 cx.update(|cx| {
1950 SettingsStore::update_global(cx, |settings, cx| {
1951 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1952 settings.languages.insert(
1953 LanguageName::new("Rust"),
1954 LanguageSettingsContent {
1955 enable_language_server: Some(true),
1956 ..Default::default()
1957 },
1958 );
1959 settings.languages.insert(
1960 LanguageName::new("JavaScript"),
1961 LanguageSettingsContent {
1962 enable_language_server: Some(false),
1963 ..Default::default()
1964 },
1965 );
1966 });
1967 })
1968 });
1969 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1970 assert_eq!(
1971 fake_rust_server_2
1972 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1973 .await
1974 .text_document
1975 .uri
1976 .as_str(),
1977 uri!("file:///dir/a.rs")
1978 );
1979 fake_js_server
1980 .receive_notification::<lsp::notification::Exit>()
1981 .await;
1982}
1983
1984#[gpui::test(iterations = 3)]
1985async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1986 init_test(cx);
1987
1988 let text = "
1989 fn a() { A }
1990 fn b() { BB }
1991 fn c() { CCC }
1992 "
1993 .unindent();
1994
1995 let fs = FakeFs::new(cx.executor());
1996 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
1997
1998 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1999 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2000
2001 language_registry.add(rust_lang());
2002 let mut fake_servers = language_registry.register_fake_lsp(
2003 "Rust",
2004 FakeLspAdapter {
2005 disk_based_diagnostics_sources: vec!["disk".into()],
2006 ..Default::default()
2007 },
2008 );
2009
2010 let buffer = project
2011 .update(cx, |project, cx| {
2012 project.open_local_buffer(path!("/dir/a.rs"), cx)
2013 })
2014 .await
2015 .unwrap();
2016
2017 let _handle = project.update(cx, |project, cx| {
2018 project.register_buffer_with_language_servers(&buffer, cx)
2019 });
2020
2021 let mut fake_server = fake_servers.next().await.unwrap();
2022 let open_notification = fake_server
2023 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2024 .await;
2025
2026 // Edit the buffer, moving the content down
2027 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2028 let change_notification_1 = fake_server
2029 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2030 .await;
2031 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2032
2033 // Report some diagnostics for the initial version of the buffer
2034 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2035 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2036 version: Some(open_notification.text_document.version),
2037 diagnostics: vec![
2038 lsp::Diagnostic {
2039 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2040 severity: Some(DiagnosticSeverity::ERROR),
2041 message: "undefined variable 'A'".to_string(),
2042 source: Some("disk".to_string()),
2043 ..Default::default()
2044 },
2045 lsp::Diagnostic {
2046 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2047 severity: Some(DiagnosticSeverity::ERROR),
2048 message: "undefined variable 'BB'".to_string(),
2049 source: Some("disk".to_string()),
2050 ..Default::default()
2051 },
2052 lsp::Diagnostic {
2053 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2054 severity: Some(DiagnosticSeverity::ERROR),
2055 source: Some("disk".to_string()),
2056 message: "undefined variable 'CCC'".to_string(),
2057 ..Default::default()
2058 },
2059 ],
2060 });
2061
2062 // The diagnostics have moved down since they were created.
2063 cx.executor().run_until_parked();
2064 buffer.update(cx, |buffer, _| {
2065 assert_eq!(
2066 buffer
2067 .snapshot()
2068 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2069 .collect::<Vec<_>>(),
2070 &[
2071 DiagnosticEntry {
2072 range: Point::new(3, 9)..Point::new(3, 11),
2073 diagnostic: Diagnostic {
2074 source: Some("disk".into()),
2075 severity: DiagnosticSeverity::ERROR,
2076 message: "undefined variable 'BB'".to_string(),
2077 is_disk_based: true,
2078 group_id: 1,
2079 is_primary: true,
2080 ..Default::default()
2081 },
2082 },
2083 DiagnosticEntry {
2084 range: Point::new(4, 9)..Point::new(4, 12),
2085 diagnostic: Diagnostic {
2086 source: Some("disk".into()),
2087 severity: DiagnosticSeverity::ERROR,
2088 message: "undefined variable 'CCC'".to_string(),
2089 is_disk_based: true,
2090 group_id: 2,
2091 is_primary: true,
2092 ..Default::default()
2093 }
2094 }
2095 ]
2096 );
2097 assert_eq!(
2098 chunks_with_diagnostics(buffer, 0..buffer.len()),
2099 [
2100 ("\n\nfn a() { ".to_string(), None),
2101 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2102 (" }\nfn b() { ".to_string(), None),
2103 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2104 (" }\nfn c() { ".to_string(), None),
2105 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2106 (" }\n".to_string(), None),
2107 ]
2108 );
2109 assert_eq!(
2110 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2111 [
2112 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2113 (" }\nfn c() { ".to_string(), None),
2114 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2115 ]
2116 );
2117 });
2118
2119 // Ensure overlapping diagnostics are highlighted correctly.
2120 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2121 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2122 version: Some(open_notification.text_document.version),
2123 diagnostics: vec![
2124 lsp::Diagnostic {
2125 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2126 severity: Some(DiagnosticSeverity::ERROR),
2127 message: "undefined variable 'A'".to_string(),
2128 source: Some("disk".to_string()),
2129 ..Default::default()
2130 },
2131 lsp::Diagnostic {
2132 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2133 severity: Some(DiagnosticSeverity::WARNING),
2134 message: "unreachable statement".to_string(),
2135 source: Some("disk".to_string()),
2136 ..Default::default()
2137 },
2138 ],
2139 });
2140
2141 cx.executor().run_until_parked();
2142 buffer.update(cx, |buffer, _| {
2143 assert_eq!(
2144 buffer
2145 .snapshot()
2146 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2147 .collect::<Vec<_>>(),
2148 &[
2149 DiagnosticEntry {
2150 range: Point::new(2, 9)..Point::new(2, 12),
2151 diagnostic: Diagnostic {
2152 source: Some("disk".into()),
2153 severity: DiagnosticSeverity::WARNING,
2154 message: "unreachable statement".to_string(),
2155 is_disk_based: true,
2156 group_id: 4,
2157 is_primary: true,
2158 ..Default::default()
2159 }
2160 },
2161 DiagnosticEntry {
2162 range: Point::new(2, 9)..Point::new(2, 10),
2163 diagnostic: Diagnostic {
2164 source: Some("disk".into()),
2165 severity: DiagnosticSeverity::ERROR,
2166 message: "undefined variable 'A'".to_string(),
2167 is_disk_based: true,
2168 group_id: 3,
2169 is_primary: true,
2170 ..Default::default()
2171 },
2172 }
2173 ]
2174 );
2175 assert_eq!(
2176 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2177 [
2178 ("fn a() { ".to_string(), None),
2179 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2180 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2181 ("\n".to_string(), None),
2182 ]
2183 );
2184 assert_eq!(
2185 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2186 [
2187 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2188 ("\n".to_string(), None),
2189 ]
2190 );
2191 });
2192
2193 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2194 // changes since the last save.
2195 buffer.update(cx, |buffer, cx| {
2196 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2197 buffer.edit(
2198 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2199 None,
2200 cx,
2201 );
2202 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2203 });
2204 let change_notification_2 = fake_server
2205 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2206 .await;
2207 assert!(
2208 change_notification_2.text_document.version > change_notification_1.text_document.version
2209 );
2210
2211 // Handle out-of-order diagnostics
2212 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2213 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2214 version: Some(change_notification_2.text_document.version),
2215 diagnostics: vec![
2216 lsp::Diagnostic {
2217 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2218 severity: Some(DiagnosticSeverity::ERROR),
2219 message: "undefined variable 'BB'".to_string(),
2220 source: Some("disk".to_string()),
2221 ..Default::default()
2222 },
2223 lsp::Diagnostic {
2224 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2225 severity: Some(DiagnosticSeverity::WARNING),
2226 message: "undefined variable 'A'".to_string(),
2227 source: Some("disk".to_string()),
2228 ..Default::default()
2229 },
2230 ],
2231 });
2232
2233 cx.executor().run_until_parked();
2234 buffer.update(cx, |buffer, _| {
2235 assert_eq!(
2236 buffer
2237 .snapshot()
2238 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2239 .collect::<Vec<_>>(),
2240 &[
2241 DiagnosticEntry {
2242 range: Point::new(2, 21)..Point::new(2, 22),
2243 diagnostic: Diagnostic {
2244 source: Some("disk".into()),
2245 severity: DiagnosticSeverity::WARNING,
2246 message: "undefined variable 'A'".to_string(),
2247 is_disk_based: true,
2248 group_id: 6,
2249 is_primary: true,
2250 ..Default::default()
2251 }
2252 },
2253 DiagnosticEntry {
2254 range: Point::new(3, 9)..Point::new(3, 14),
2255 diagnostic: Diagnostic {
2256 source: Some("disk".into()),
2257 severity: DiagnosticSeverity::ERROR,
2258 message: "undefined variable 'BB'".to_string(),
2259 is_disk_based: true,
2260 group_id: 5,
2261 is_primary: true,
2262 ..Default::default()
2263 },
2264 }
2265 ]
2266 );
2267 });
2268}
2269
2270#[gpui::test]
2271async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2272 init_test(cx);
2273
2274 let text = concat!(
2275 "let one = ;\n", //
2276 "let two = \n",
2277 "let three = 3;\n",
2278 );
2279
2280 let fs = FakeFs::new(cx.executor());
2281 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2282
2283 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2284 let buffer = project
2285 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2286 .await
2287 .unwrap();
2288
2289 project.update(cx, |project, cx| {
2290 project.lsp_store.update(cx, |lsp_store, cx| {
2291 lsp_store
2292 .update_diagnostic_entries(
2293 LanguageServerId(0),
2294 PathBuf::from("/dir/a.rs"),
2295 None,
2296 vec![
2297 DiagnosticEntry {
2298 range: Unclipped(PointUtf16::new(0, 10))
2299 ..Unclipped(PointUtf16::new(0, 10)),
2300 diagnostic: Diagnostic {
2301 severity: DiagnosticSeverity::ERROR,
2302 message: "syntax error 1".to_string(),
2303 ..Default::default()
2304 },
2305 },
2306 DiagnosticEntry {
2307 range: Unclipped(PointUtf16::new(1, 10))
2308 ..Unclipped(PointUtf16::new(1, 10)),
2309 diagnostic: Diagnostic {
2310 severity: DiagnosticSeverity::ERROR,
2311 message: "syntax error 2".to_string(),
2312 ..Default::default()
2313 },
2314 },
2315 ],
2316 cx,
2317 )
2318 .unwrap();
2319 })
2320 });
2321
2322 // An empty range is extended forward to include the following character.
2323 // At the end of a line, an empty range is extended backward to include
2324 // the preceding character.
2325 buffer.update(cx, |buffer, _| {
2326 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2327 assert_eq!(
2328 chunks
2329 .iter()
2330 .map(|(s, d)| (s.as_str(), *d))
2331 .collect::<Vec<_>>(),
2332 &[
2333 ("let one = ", None),
2334 (";", Some(DiagnosticSeverity::ERROR)),
2335 ("\nlet two =", None),
2336 (" ", Some(DiagnosticSeverity::ERROR)),
2337 ("\nlet three = 3;\n", None)
2338 ]
2339 );
2340 });
2341}
2342
2343#[gpui::test]
2344async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2345 init_test(cx);
2346
2347 let fs = FakeFs::new(cx.executor());
2348 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2349 .await;
2350
2351 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2352 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2353
2354 lsp_store.update(cx, |lsp_store, cx| {
2355 lsp_store
2356 .update_diagnostic_entries(
2357 LanguageServerId(0),
2358 Path::new("/dir/a.rs").to_owned(),
2359 None,
2360 vec![DiagnosticEntry {
2361 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2362 diagnostic: Diagnostic {
2363 severity: DiagnosticSeverity::ERROR,
2364 is_primary: true,
2365 message: "syntax error a1".to_string(),
2366 ..Default::default()
2367 },
2368 }],
2369 cx,
2370 )
2371 .unwrap();
2372 lsp_store
2373 .update_diagnostic_entries(
2374 LanguageServerId(1),
2375 Path::new("/dir/a.rs").to_owned(),
2376 None,
2377 vec![DiagnosticEntry {
2378 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2379 diagnostic: Diagnostic {
2380 severity: DiagnosticSeverity::ERROR,
2381 is_primary: true,
2382 message: "syntax error b1".to_string(),
2383 ..Default::default()
2384 },
2385 }],
2386 cx,
2387 )
2388 .unwrap();
2389
2390 assert_eq!(
2391 lsp_store.diagnostic_summary(false, cx),
2392 DiagnosticSummary {
2393 error_count: 2,
2394 warning_count: 0,
2395 }
2396 );
2397 });
2398}
2399
// Verifies that `edits_from_lsp` can apply edits computed by a language server
// against a *stale* document version: the edits carry the version from the
// `didOpen` notification, and must be translated across all buffer edits that
// happened after that snapshot was sent to the server.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the LSP edits
    // below will be interpreted relative to this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP positions below refer to the ORIGINAL (pre-edit) text; passing
    // `lsp_document_version` tells `edits_from_lsp` which snapshot to resolve
    // them against before mapping them onto the current buffer contents.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the user's intervening edits
    // (the inserted comments) while landing the server's changes correctly.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2554
// Verifies that `edits_from_lsp` condenses a "very large diff describing a
// small change" into minimal edits. The server rewrites most of the file to
// merge two imports; the resulting buffer edits should only touch the lines
// that actually changed.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four server edits above must collapse into just two minimal
        // buffer edits: the import rewrite and the removal of the duplicate.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2665
// Verifies tolerance of a spec-violating edit ordering: a replacement followed
// by an insertion at the same start position (the LSP spec requires inserts to
// come first). Both edits must still be applied, with the insertion landing
// before the replaced text.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        // The inserted import ends up before the (identity) replacement.
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
2721
// Verifies that `edits_from_lsp` sanitizes malformed server edits: unordered
// edits, an inverted range (start after end), and a range whose end line lies
// far beyond the end of the document. The result must still be the same two
// minimal edits as in the well-formed case.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start column 8, end column 4.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same condensed result as the well-formed diff in the test above.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2828
2829fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2830 buffer: &Buffer,
2831 range: Range<T>,
2832) -> Vec<(String, Option<DiagnosticSeverity>)> {
2833 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2834 for chunk in buffer.snapshot().chunks(range, true) {
2835 if chunks.last().map_or(false, |prev_chunk| {
2836 prev_chunk.1 == chunk.diagnostic_severity
2837 }) {
2838 chunks.last_mut().unwrap().0.push_str(chunk.text);
2839 } else {
2840 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2841 }
2842 }
2843 chunks
2844}
2845
// Verifies go-to-definition into a file outside the project's visible
// worktrees: the target buffer is loaded via an invisible worktree that is
// created on demand and released when the definition result is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server checks the request's document/position, then points at
    // the definition in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, a.rs is held by an extra, invisible
        // worktree alongside the visible one for b.rs.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Returns each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2943
// Verifies completion resolution when the server supplies an explicit
// `text_edit`: its range and new text win over both `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the completion request first; the handler installed below answers it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        // Replace the trailing "fqn".
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // Both the replacement text and the replaced range come from the text_edit.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3021
// Verifies completion resolution when items carry no `text_edit` but the
// completion list provides a default `edit_range`: the default range is used,
// and the replacement text falls back from `insert_text` to `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // List-level default range covering the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text supplies the replacement; the default edit_range supplies the range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].old_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no insert_text either, the label itself becomes the replacement.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].old_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3147
// Verifies completion resolution when the server supplies neither a
// `text_edit` nor a default edit_range: the editor infers the replaced range
// itself, as shown by the `old_range` assertions below.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the trailing "fqn" before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor is just before the closing quote, inside the string literal.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp", the segment before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3243
// Verifies that carriage returns in a completion's insert_text (both bare "\r"
// and "\r\n") are normalized to "\n" in the resulting new_text.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mixed line endings on purpose: "\r" and "\r\n".
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    assert_eq!(completions.len(), 1);
    // Both CR forms collapse to plain newlines.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3306
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // End-to-end check of command-backed code actions: the action carries no
    // edits, so applying it must resolve the action, execute its command, and
    // capture the server-initiated `workspace/applyEdit` into the transaction.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript server that advertises lazily-resolved code
    // actions and a single executable command.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // Actions must be resolved before application, letting the
                        // server attach the command only at resolve time.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3447
3448#[gpui::test(iterations = 10)]
3449async fn test_save_file(cx: &mut gpui::TestAppContext) {
3450 init_test(cx);
3451
3452 let fs = FakeFs::new(cx.executor());
3453 fs.insert_tree(
3454 path!("/dir"),
3455 json!({
3456 "file1": "the old contents",
3457 }),
3458 )
3459 .await;
3460
3461 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3462 let buffer = project
3463 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3464 .await
3465 .unwrap();
3466 buffer.update(cx, |buffer, cx| {
3467 assert_eq!(buffer.text(), "the old contents");
3468 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3469 });
3470
3471 project
3472 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3473 .await
3474 .unwrap();
3475
3476 let new_text = fs
3477 .load(Path::new(path!("/dir/file1")))
3478 .await
3479 .unwrap()
3480 .replace("\r\n", "\n");
3481 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3482}
3483
3484#[gpui::test(iterations = 30)]
3485async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3486 init_test(cx);
3487
3488 let fs = FakeFs::new(cx.executor().clone());
3489 fs.insert_tree(
3490 path!("/dir"),
3491 json!({
3492 "file1": "the original contents",
3493 }),
3494 )
3495 .await;
3496
3497 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3498 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3499 let buffer = project
3500 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3501 .await
3502 .unwrap();
3503
3504 // Simulate buffer diffs being slow, so that they don't complete before
3505 // the next file change occurs.
3506 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3507
3508 // Change the buffer's file on disk, and then wait for the file change
3509 // to be detected by the worktree, so that the buffer starts reloading.
3510 fs.save(
3511 path!("/dir/file1").as_ref(),
3512 &"the first contents".into(),
3513 Default::default(),
3514 )
3515 .await
3516 .unwrap();
3517 worktree.next_event(cx).await;
3518
3519 // Change the buffer's file again. Depending on the random seed, the
3520 // previous file change may still be in progress.
3521 fs.save(
3522 path!("/dir/file1").as_ref(),
3523 &"the second contents".into(),
3524 Default::default(),
3525 )
3526 .await
3527 .unwrap();
3528 worktree.next_event(cx).await;
3529
3530 cx.executor().run_until_parked();
3531 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3532 buffer.read_with(cx, |buffer, _| {
3533 assert_eq!(buffer.text(), on_disk_text);
3534 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3535 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3536 });
3537}
3538
3539#[gpui::test(iterations = 30)]
3540async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3541 init_test(cx);
3542
3543 let fs = FakeFs::new(cx.executor().clone());
3544 fs.insert_tree(
3545 path!("/dir"),
3546 json!({
3547 "file1": "the original contents",
3548 }),
3549 )
3550 .await;
3551
3552 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3553 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3554 let buffer = project
3555 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3556 .await
3557 .unwrap();
3558
3559 // Simulate buffer diffs being slow, so that they don't complete before
3560 // the next file change occurs.
3561 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3562
3563 // Change the buffer's file on disk, and then wait for the file change
3564 // to be detected by the worktree, so that the buffer starts reloading.
3565 fs.save(
3566 path!("/dir/file1").as_ref(),
3567 &"the first contents".into(),
3568 Default::default(),
3569 )
3570 .await
3571 .unwrap();
3572 worktree.next_event(cx).await;
3573
3574 cx.executor()
3575 .spawn(cx.executor().simulate_random_delay())
3576 .await;
3577
3578 // Perform a noop edit, causing the buffer's version to increase.
3579 buffer.update(cx, |buffer, cx| {
3580 buffer.edit([(0..0, " ")], None, cx);
3581 buffer.undo(cx);
3582 });
3583
3584 cx.executor().run_until_parked();
3585 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3586 buffer.read_with(cx, |buffer, _| {
3587 let buffer_text = buffer.text();
3588 if buffer_text == on_disk_text {
3589 assert!(
3590 !buffer.is_dirty() && !buffer.has_conflict(),
3591 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3592 );
3593 }
3594 // If the file change occurred while the buffer was processing the first
3595 // change, the buffer will be in a conflicting state.
3596 else {
3597 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3598 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3599 }
3600 });
3601}
3602
3603#[gpui::test]
3604async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3605 init_test(cx);
3606
3607 let fs = FakeFs::new(cx.executor());
3608 fs.insert_tree(
3609 path!("/dir"),
3610 json!({
3611 "file1": "the old contents",
3612 }),
3613 )
3614 .await;
3615
3616 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3617 let buffer = project
3618 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3619 .await
3620 .unwrap();
3621 buffer.update(cx, |buffer, cx| {
3622 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3623 });
3624
3625 project
3626 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3627 .await
3628 .unwrap();
3629
3630 let new_text = fs
3631 .load(Path::new(path!("/dir/file1")))
3632 .await
3633 .unwrap()
3634 .replace("\r\n", "\n");
3635 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3636}
3637
3638#[gpui::test]
3639async fn test_save_as(cx: &mut gpui::TestAppContext) {
3640 init_test(cx);
3641
3642 let fs = FakeFs::new(cx.executor());
3643 fs.insert_tree("/dir", json!({})).await;
3644
3645 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3646
3647 let languages = project.update(cx, |project, _| project.languages().clone());
3648 languages.add(rust_lang());
3649
3650 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3651 buffer.update(cx, |buffer, cx| {
3652 buffer.edit([(0..0, "abc")], None, cx);
3653 assert!(buffer.is_dirty());
3654 assert!(!buffer.has_conflict());
3655 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3656 });
3657 project
3658 .update(cx, |project, cx| {
3659 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3660 let path = ProjectPath {
3661 worktree_id,
3662 path: Arc::from(Path::new("file1.rs")),
3663 };
3664 project.save_buffer_as(buffer.clone(), path, cx)
3665 })
3666 .await
3667 .unwrap();
3668 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3669
3670 cx.executor().run_until_parked();
3671 buffer.update(cx, |buffer, cx| {
3672 assert_eq!(
3673 buffer.file().unwrap().full_path(cx),
3674 Path::new("dir/file1.rs")
3675 );
3676 assert!(!buffer.is_dirty());
3677 assert!(!buffer.has_conflict());
3678 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3679 });
3680
3681 let opened_buffer = project
3682 .update(cx, |project, cx| {
3683 project.open_local_buffer("/dir/file1.rs", cx)
3684 })
3685 .await
3686 .unwrap();
3687 assert_eq!(opened_buffer, buffer);
3688}
3689
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Exercises the real filesystem: renames/deletions must preserve entry ids
    // and open-buffer paths, and the resulting updates must replicate to a
    // remote worktree so it converges to the same set of paths.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp tree's root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Resolves a worktree-relative path to its stable entry id, panicking if
    // no entry exists at that path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits so they can be replayed
    // into the remote copy below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the post-rename/-deletion layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including renames of ancestor directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files' new paths; the deleted file's buffer
    // keeps its old path but reports a deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3855
3856#[gpui::test(iterations = 10)]
3857async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3858 init_test(cx);
3859
3860 let fs = FakeFs::new(cx.executor());
3861 fs.insert_tree(
3862 path!("/dir"),
3863 json!({
3864 "a": {
3865 "file1": "",
3866 }
3867 }),
3868 )
3869 .await;
3870
3871 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3872 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3873 let tree_id = tree.update(cx, |tree, _| tree.id());
3874
3875 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3876 project.update(cx, |project, cx| {
3877 let tree = project.worktrees(cx).next().unwrap();
3878 tree.read(cx)
3879 .entry_for_path(path)
3880 .unwrap_or_else(|| panic!("no entry for path {}", path))
3881 .id
3882 })
3883 };
3884
3885 let dir_id = id_for_path("a", cx);
3886 let file_id = id_for_path("a/file1", cx);
3887 let buffer = project
3888 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3889 .await
3890 .unwrap();
3891 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3892
3893 project
3894 .update(cx, |project, cx| {
3895 project.rename_entry(dir_id, Path::new("b"), cx)
3896 })
3897 .unwrap()
3898 .await
3899 .to_included()
3900 .unwrap();
3901 cx.executor().run_until_parked();
3902
3903 assert_eq!(id_for_path("b", cx), dir_id);
3904 assert_eq!(id_for_path("b/file1", cx), file_id);
3905 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3906}
3907
3908#[gpui::test]
3909async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3910 init_test(cx);
3911
3912 let fs = FakeFs::new(cx.executor());
3913 fs.insert_tree(
3914 "/dir",
3915 json!({
3916 "a.txt": "a-contents",
3917 "b.txt": "b-contents",
3918 }),
3919 )
3920 .await;
3921
3922 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3923
3924 // Spawn multiple tasks to open paths, repeating some paths.
3925 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3926 (
3927 p.open_local_buffer("/dir/a.txt", cx),
3928 p.open_local_buffer("/dir/b.txt", cx),
3929 p.open_local_buffer("/dir/a.txt", cx),
3930 )
3931 });
3932
3933 let buffer_a_1 = buffer_a_1.await.unwrap();
3934 let buffer_a_2 = buffer_a_2.await.unwrap();
3935 let buffer_b = buffer_b.await.unwrap();
3936 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3937 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3938
3939 // There is only one buffer per path.
3940 let buffer_a_id = buffer_a_1.entity_id();
3941 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3942
3943 // Open the same path again while it is still open.
3944 drop(buffer_a_1);
3945 let buffer_a_3 = project
3946 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3947 .await
3948 .unwrap();
3949
3950 // There's still only one buffer per path.
3951 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3952}
3953
3954#[gpui::test]
3955async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3956 init_test(cx);
3957
3958 let fs = FakeFs::new(cx.executor());
3959 fs.insert_tree(
3960 path!("/dir"),
3961 json!({
3962 "file1": "abc",
3963 "file2": "def",
3964 "file3": "ghi",
3965 }),
3966 )
3967 .await;
3968
3969 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3970
3971 let buffer1 = project
3972 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3973 .await
3974 .unwrap();
3975 let events = Arc::new(Mutex::new(Vec::new()));
3976
3977 // initially, the buffer isn't dirty.
3978 buffer1.update(cx, |buffer, cx| {
3979 cx.subscribe(&buffer1, {
3980 let events = events.clone();
3981 move |_, _, event, _| match event {
3982 BufferEvent::Operation { .. } => {}
3983 _ => events.lock().push(event.clone()),
3984 }
3985 })
3986 .detach();
3987
3988 assert!(!buffer.is_dirty());
3989 assert!(events.lock().is_empty());
3990
3991 buffer.edit([(1..2, "")], None, cx);
3992 });
3993
3994 // after the first edit, the buffer is dirty, and emits a dirtied event.
3995 buffer1.update(cx, |buffer, cx| {
3996 assert!(buffer.text() == "ac");
3997 assert!(buffer.is_dirty());
3998 assert_eq!(
3999 *events.lock(),
4000 &[
4001 language::BufferEvent::Edited,
4002 language::BufferEvent::DirtyChanged
4003 ]
4004 );
4005 events.lock().clear();
4006 buffer.did_save(
4007 buffer.version(),
4008 buffer.file().unwrap().disk_state().mtime(),
4009 cx,
4010 );
4011 });
4012
4013 // after saving, the buffer is not dirty, and emits a saved event.
4014 buffer1.update(cx, |buffer, cx| {
4015 assert!(!buffer.is_dirty());
4016 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4017 events.lock().clear();
4018
4019 buffer.edit([(1..1, "B")], None, cx);
4020 buffer.edit([(2..2, "D")], None, cx);
4021 });
4022
4023 // after editing again, the buffer is dirty, and emits another dirty event.
4024 buffer1.update(cx, |buffer, cx| {
4025 assert!(buffer.text() == "aBDc");
4026 assert!(buffer.is_dirty());
4027 assert_eq!(
4028 *events.lock(),
4029 &[
4030 language::BufferEvent::Edited,
4031 language::BufferEvent::DirtyChanged,
4032 language::BufferEvent::Edited,
4033 ],
4034 );
4035 events.lock().clear();
4036
4037 // After restoring the buffer to its previously-saved state,
4038 // the buffer is not considered dirty anymore.
4039 buffer.edit([(1..3, "")], None, cx);
4040 assert!(buffer.text() == "ac");
4041 assert!(!buffer.is_dirty());
4042 });
4043
4044 assert_eq!(
4045 *events.lock(),
4046 &[
4047 language::BufferEvent::Edited,
4048 language::BufferEvent::DirtyChanged
4049 ]
4050 );
4051
4052 // When a file is deleted, it is not considered dirty.
4053 let events = Arc::new(Mutex::new(Vec::new()));
4054 let buffer2 = project
4055 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4056 .await
4057 .unwrap();
4058 buffer2.update(cx, |_, cx| {
4059 cx.subscribe(&buffer2, {
4060 let events = events.clone();
4061 move |_, _, event, _| match event {
4062 BufferEvent::Operation { .. } => {}
4063 _ => events.lock().push(event.clone()),
4064 }
4065 })
4066 .detach();
4067 });
4068
4069 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4070 .await
4071 .unwrap();
4072 cx.executor().run_until_parked();
4073 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4074 assert_eq!(
4075 mem::take(&mut *events.lock()),
4076 &[language::BufferEvent::FileHandleChanged]
4077 );
4078
4079 // Buffer becomes dirty when edited.
4080 buffer2.update(cx, |buffer, cx| {
4081 buffer.edit([(2..3, "")], None, cx);
4082 assert_eq!(buffer.is_dirty(), true);
4083 });
4084 assert_eq!(
4085 mem::take(&mut *events.lock()),
4086 &[
4087 language::BufferEvent::Edited,
4088 language::BufferEvent::DirtyChanged
4089 ]
4090 );
4091
4092 // Buffer becomes clean again when all of its content is removed, because
4093 // the file was deleted.
4094 buffer2.update(cx, |buffer, cx| {
4095 buffer.edit([(0..2, "")], None, cx);
4096 assert_eq!(buffer.is_empty(), true);
4097 assert_eq!(buffer.is_dirty(), false);
4098 });
4099 assert_eq!(
4100 *events.lock(),
4101 &[
4102 language::BufferEvent::Edited,
4103 language::BufferEvent::DirtyChanged
4104 ]
4105 );
4106
4107 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4108 let events = Arc::new(Mutex::new(Vec::new()));
4109 let buffer3 = project
4110 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4111 .await
4112 .unwrap();
4113 buffer3.update(cx, |_, cx| {
4114 cx.subscribe(&buffer3, {
4115 let events = events.clone();
4116 move |_, _, event, _| match event {
4117 BufferEvent::Operation { .. } => {}
4118 _ => events.lock().push(event.clone()),
4119 }
4120 })
4121 .detach();
4122 });
4123
4124 buffer3.update(cx, |buffer, cx| {
4125 buffer.edit([(0..0, "x")], None, cx);
4126 });
4127 events.lock().clear();
4128 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4129 .await
4130 .unwrap();
4131 cx.executor().run_until_parked();
4132 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4133 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4134}
4135
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // A clean buffer reloads from disk via a diff (preserving anchors), while
    // a dirty buffer refuses to reload and is flagged as conflicted instead.
    init_test(cx);

    // The ˇ markers yield offsets whose anchors we track across the reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create an anchor at each marked position of the initial text.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // The freshly-opened buffer is clean and matches the disk.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk, altering words on each line while keeping the
    // same line count; the marked positions move with the changed words.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The diff-based reload moved each anchor to the corresponding marked
        // position in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4218
4219#[gpui::test]
4220async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4221 init_test(cx);
4222
4223 let fs = FakeFs::new(cx.executor());
4224 fs.insert_tree(
4225 path!("/dir"),
4226 json!({
4227 "file1": "a\nb\nc\n",
4228 "file2": "one\r\ntwo\r\nthree\r\n",
4229 }),
4230 )
4231 .await;
4232
4233 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4234 let buffer1 = project
4235 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4236 .await
4237 .unwrap();
4238 let buffer2 = project
4239 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4240 .await
4241 .unwrap();
4242
4243 buffer1.update(cx, |buffer, _| {
4244 assert_eq!(buffer.text(), "a\nb\nc\n");
4245 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4246 });
4247 buffer2.update(cx, |buffer, _| {
4248 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4249 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4250 });
4251
4252 // Change a file's line endings on disk from unix to windows. The buffer's
4253 // state updates correctly.
4254 fs.save(
4255 path!("/dir/file1").as_ref(),
4256 &"aaa\nb\nc\n".into(),
4257 LineEnding::Windows,
4258 )
4259 .await
4260 .unwrap();
4261 cx.executor().run_until_parked();
4262 buffer1.update(cx, |buffer, _| {
4263 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4264 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4265 });
4266
4267 // Save a file with windows line endings. The file is written correctly.
4268 buffer2.update(cx, |buffer, cx| {
4269 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4270 });
4271 project
4272 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4273 .await
4274 .unwrap();
4275 assert_eq!(
4276 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4277 "one\r\ntwo\r\nthree\r\nfour\r\n",
4278 );
4279}
4280
4281#[gpui::test]
4282async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4283 init_test(cx);
4284
4285 let fs = FakeFs::new(cx.executor());
4286 fs.insert_tree(
4287 path!("/dir"),
4288 json!({
4289 "a.rs": "
4290 fn foo(mut v: Vec<usize>) {
4291 for x in &v {
4292 v.push(1);
4293 }
4294 }
4295 "
4296 .unindent(),
4297 }),
4298 )
4299 .await;
4300
4301 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4302 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
4303 let buffer = project
4304 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
4305 .await
4306 .unwrap();
4307
4308 let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
4309 let message = lsp::PublishDiagnosticsParams {
4310 uri: buffer_uri.clone(),
4311 diagnostics: vec![
4312 lsp::Diagnostic {
4313 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4314 severity: Some(DiagnosticSeverity::WARNING),
4315 message: "error 1".to_string(),
4316 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4317 location: lsp::Location {
4318 uri: buffer_uri.clone(),
4319 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4320 },
4321 message: "error 1 hint 1".to_string(),
4322 }]),
4323 ..Default::default()
4324 },
4325 lsp::Diagnostic {
4326 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4327 severity: Some(DiagnosticSeverity::HINT),
4328 message: "error 1 hint 1".to_string(),
4329 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4330 location: lsp::Location {
4331 uri: buffer_uri.clone(),
4332 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4333 },
4334 message: "original diagnostic".to_string(),
4335 }]),
4336 ..Default::default()
4337 },
4338 lsp::Diagnostic {
4339 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4340 severity: Some(DiagnosticSeverity::ERROR),
4341 message: "error 2".to_string(),
4342 related_information: Some(vec![
4343 lsp::DiagnosticRelatedInformation {
4344 location: lsp::Location {
4345 uri: buffer_uri.clone(),
4346 range: lsp::Range::new(
4347 lsp::Position::new(1, 13),
4348 lsp::Position::new(1, 15),
4349 ),
4350 },
4351 message: "error 2 hint 1".to_string(),
4352 },
4353 lsp::DiagnosticRelatedInformation {
4354 location: lsp::Location {
4355 uri: buffer_uri.clone(),
4356 range: lsp::Range::new(
4357 lsp::Position::new(1, 13),
4358 lsp::Position::new(1, 15),
4359 ),
4360 },
4361 message: "error 2 hint 2".to_string(),
4362 },
4363 ]),
4364 ..Default::default()
4365 },
4366 lsp::Diagnostic {
4367 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4368 severity: Some(DiagnosticSeverity::HINT),
4369 message: "error 2 hint 1".to_string(),
4370 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4371 location: lsp::Location {
4372 uri: buffer_uri.clone(),
4373 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4374 },
4375 message: "original diagnostic".to_string(),
4376 }]),
4377 ..Default::default()
4378 },
4379 lsp::Diagnostic {
4380 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4381 severity: Some(DiagnosticSeverity::HINT),
4382 message: "error 2 hint 2".to_string(),
4383 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4384 location: lsp::Location {
4385 uri: buffer_uri,
4386 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4387 },
4388 message: "original diagnostic".to_string(),
4389 }]),
4390 ..Default::default()
4391 },
4392 ],
4393 version: None,
4394 };
4395
4396 lsp_store
4397 .update(cx, |lsp_store, cx| {
4398 lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
4399 })
4400 .unwrap();
4401 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
4402
4403 assert_eq!(
4404 buffer
4405 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
4406 .collect::<Vec<_>>(),
4407 &[
4408 DiagnosticEntry {
4409 range: Point::new(1, 8)..Point::new(1, 9),
4410 diagnostic: Diagnostic {
4411 severity: DiagnosticSeverity::WARNING,
4412 message: "error 1".to_string(),
4413 group_id: 1,
4414 is_primary: true,
4415 ..Default::default()
4416 }
4417 },
4418 DiagnosticEntry {
4419 range: Point::new(1, 8)..Point::new(1, 9),
4420 diagnostic: Diagnostic {
4421 severity: DiagnosticSeverity::HINT,
4422 message: "error 1 hint 1".to_string(),
4423 group_id: 1,
4424 is_primary: false,
4425 ..Default::default()
4426 }
4427 },
4428 DiagnosticEntry {
4429 range: Point::new(1, 13)..Point::new(1, 15),
4430 diagnostic: Diagnostic {
4431 severity: DiagnosticSeverity::HINT,
4432 message: "error 2 hint 1".to_string(),
4433 group_id: 0,
4434 is_primary: false,
4435 ..Default::default()
4436 }
4437 },
4438 DiagnosticEntry {
4439 range: Point::new(1, 13)..Point::new(1, 15),
4440 diagnostic: Diagnostic {
4441 severity: DiagnosticSeverity::HINT,
4442 message: "error 2 hint 2".to_string(),
4443 group_id: 0,
4444 is_primary: false,
4445 ..Default::default()
4446 }
4447 },
4448 DiagnosticEntry {
4449 range: Point::new(2, 8)..Point::new(2, 17),
4450 diagnostic: Diagnostic {
4451 severity: DiagnosticSeverity::ERROR,
4452 message: "error 2".to_string(),
4453 group_id: 0,
4454 is_primary: true,
4455 ..Default::default()
4456 }
4457 }
4458 ]
4459 );
4460
4461 assert_eq!(
4462 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4463 &[
4464 DiagnosticEntry {
4465 range: Point::new(1, 13)..Point::new(1, 15),
4466 diagnostic: Diagnostic {
4467 severity: DiagnosticSeverity::HINT,
4468 message: "error 2 hint 1".to_string(),
4469 group_id: 0,
4470 is_primary: false,
4471 ..Default::default()
4472 }
4473 },
4474 DiagnosticEntry {
4475 range: Point::new(1, 13)..Point::new(1, 15),
4476 diagnostic: Diagnostic {
4477 severity: DiagnosticSeverity::HINT,
4478 message: "error 2 hint 2".to_string(),
4479 group_id: 0,
4480 is_primary: false,
4481 ..Default::default()
4482 }
4483 },
4484 DiagnosticEntry {
4485 range: Point::new(2, 8)..Point::new(2, 17),
4486 diagnostic: Diagnostic {
4487 severity: DiagnosticSeverity::ERROR,
4488 message: "error 2".to_string(),
4489 group_id: 0,
4490 is_primary: true,
4491 ..Default::default()
4492 }
4493 }
4494 ]
4495 );
4496
4497 assert_eq!(
4498 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4499 &[
4500 DiagnosticEntry {
4501 range: Point::new(1, 8)..Point::new(1, 9),
4502 diagnostic: Diagnostic {
4503 severity: DiagnosticSeverity::WARNING,
4504 message: "error 1".to_string(),
4505 group_id: 1,
4506 is_primary: true,
4507 ..Default::default()
4508 }
4509 },
4510 DiagnosticEntry {
4511 range: Point::new(1, 8)..Point::new(1, 9),
4512 diagnostic: Diagnostic {
4513 severity: DiagnosticSeverity::HINT,
4514 message: "error 1 hint 1".to_string(),
4515 group_id: 1,
4516 is_primary: false,
4517 ..Default::default()
4518 }
4519 },
4520 ]
4521 );
4522}
4523
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Renaming a file through the project should drive the LSP file-operation
    // protocol: the server receives a `workspace/willRenameFiles` request
    // (whose returned workspace edit must be applied) followed by a
    // `workspace/didRenameFiles` notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters the fake server registers for: all `.rs` files
    // plus all folders, restricted to the `file` scheme.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    // Advertise both willRename (request) and didRename (notification)
    // support so the project is expected to send both.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening the buffer starts the language server for this worktree.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the resulting future is awaited only after the
    // willRenameFiles handler below has been installed and has fired.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // Edit the server will return from willRenameFiles; the deliberately odd
    // new_text and version make it easy to recognize downstream.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the handler actually produced so we can assert on it
    // after the rename completes.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe exactly the one.rs -> three.rs rename.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename resolves, the server must also receive the
    // didRenameFiles notification with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4652
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end symbol rename through the LSP: `textDocument/prepareRename`
    // validates the range under the cursor, then `textDocument/rename`
    // produces a multi-file workspace edit that is applied to both buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server advertises rename with prepare support so the project
    // issues prepareRename before the actual rename.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside the identifier `ONE`); the server
    // answers with the identifier's full range, columns 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename `ONE` -> `THREE`; the server's workspace edit
    // touches the definition in one.rs and both usages in two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its undo transaction; both
    // files must now reflect the rename.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4792
4793#[gpui::test]
4794async fn test_search(cx: &mut gpui::TestAppContext) {
4795 init_test(cx);
4796
4797 let fs = FakeFs::new(cx.executor());
4798 fs.insert_tree(
4799 path!("/dir"),
4800 json!({
4801 "one.rs": "const ONE: usize = 1;",
4802 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4803 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4804 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4805 }),
4806 )
4807 .await;
4808 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4809 assert_eq!(
4810 search(
4811 &project,
4812 SearchQuery::text(
4813 "TWO",
4814 false,
4815 true,
4816 false,
4817 Default::default(),
4818 Default::default(),
4819 None
4820 )
4821 .unwrap(),
4822 cx
4823 )
4824 .await
4825 .unwrap(),
4826 HashMap::from_iter([
4827 (separator!("dir/two.rs").to_string(), vec![6..9]),
4828 (separator!("dir/three.rs").to_string(), vec![37..40])
4829 ])
4830 );
4831
4832 let buffer_4 = project
4833 .update(cx, |project, cx| {
4834 project.open_local_buffer(path!("/dir/four.rs"), cx)
4835 })
4836 .await
4837 .unwrap();
4838 buffer_4.update(cx, |buffer, cx| {
4839 let text = "two::TWO";
4840 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4841 });
4842
4843 assert_eq!(
4844 search(
4845 &project,
4846 SearchQuery::text(
4847 "TWO",
4848 false,
4849 true,
4850 false,
4851 Default::default(),
4852 Default::default(),
4853 None,
4854 )
4855 .unwrap(),
4856 cx
4857 )
4858 .await
4859 .unwrap(),
4860 HashMap::from_iter([
4861 (separator!("dir/two.rs").to_string(), vec![6..9]),
4862 (separator!("dir/three.rs").to_string(), vec![37..40]),
4863 (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
4864 ])
4865 );
4866}
4867
4868#[gpui::test]
4869async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4870 init_test(cx);
4871
4872 let search_query = "file";
4873
4874 let fs = FakeFs::new(cx.executor());
4875 fs.insert_tree(
4876 path!("/dir"),
4877 json!({
4878 "one.rs": r#"// Rust file one"#,
4879 "one.ts": r#"// TypeScript file one"#,
4880 "two.rs": r#"// Rust file two"#,
4881 "two.ts": r#"// TypeScript file two"#,
4882 }),
4883 )
4884 .await;
4885 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4886
4887 assert!(
4888 search(
4889 &project,
4890 SearchQuery::text(
4891 search_query,
4892 false,
4893 true,
4894 false,
4895 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4896 Default::default(),
4897 None
4898 )
4899 .unwrap(),
4900 cx
4901 )
4902 .await
4903 .unwrap()
4904 .is_empty(),
4905 "If no inclusions match, no files should be returned"
4906 );
4907
4908 assert_eq!(
4909 search(
4910 &project,
4911 SearchQuery::text(
4912 search_query,
4913 false,
4914 true,
4915 false,
4916 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4917 Default::default(),
4918 None
4919 )
4920 .unwrap(),
4921 cx
4922 )
4923 .await
4924 .unwrap(),
4925 HashMap::from_iter([
4926 (separator!("dir/one.rs").to_string(), vec![8..12]),
4927 (separator!("dir/two.rs").to_string(), vec![8..12]),
4928 ]),
4929 "Rust only search should give only Rust files"
4930 );
4931
4932 assert_eq!(
4933 search(
4934 &project,
4935 SearchQuery::text(
4936 search_query,
4937 false,
4938 true,
4939 false,
4940 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4941 Default::default(),
4942 None,
4943 )
4944 .unwrap(),
4945 cx
4946 )
4947 .await
4948 .unwrap(),
4949 HashMap::from_iter([
4950 (separator!("dir/one.ts").to_string(), vec![14..18]),
4951 (separator!("dir/two.ts").to_string(), vec![14..18]),
4952 ]),
4953 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4954 );
4955
4956 assert_eq!(
4957 search(
4958 &project,
4959 SearchQuery::text(
4960 search_query,
4961 false,
4962 true,
4963 false,
4964 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
4965 .unwrap(),
4966 Default::default(),
4967 None,
4968 )
4969 .unwrap(),
4970 cx
4971 )
4972 .await
4973 .unwrap(),
4974 HashMap::from_iter([
4975 (separator!("dir/two.ts").to_string(), vec![14..18]),
4976 (separator!("dir/one.rs").to_string(), vec![8..12]),
4977 (separator!("dir/one.ts").to_string(), vec![14..18]),
4978 (separator!("dir/two.rs").to_string(), vec![8..12]),
4979 ]),
4980 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4981 );
4982}
4983
4984#[gpui::test]
4985async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4986 init_test(cx);
4987
4988 let search_query = "file";
4989
4990 let fs = FakeFs::new(cx.executor());
4991 fs.insert_tree(
4992 path!("/dir"),
4993 json!({
4994 "one.rs": r#"// Rust file one"#,
4995 "one.ts": r#"// TypeScript file one"#,
4996 "two.rs": r#"// Rust file two"#,
4997 "two.ts": r#"// TypeScript file two"#,
4998 }),
4999 )
5000 .await;
5001 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5002
5003 assert_eq!(
5004 search(
5005 &project,
5006 SearchQuery::text(
5007 search_query,
5008 false,
5009 true,
5010 false,
5011 Default::default(),
5012 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5013 None,
5014 )
5015 .unwrap(),
5016 cx
5017 )
5018 .await
5019 .unwrap(),
5020 HashMap::from_iter([
5021 (separator!("dir/one.rs").to_string(), vec![8..12]),
5022 (separator!("dir/one.ts").to_string(), vec![14..18]),
5023 (separator!("dir/two.rs").to_string(), vec![8..12]),
5024 (separator!("dir/two.ts").to_string(), vec![14..18]),
5025 ]),
5026 "If no exclusions match, all files should be returned"
5027 );
5028
5029 assert_eq!(
5030 search(
5031 &project,
5032 SearchQuery::text(
5033 search_query,
5034 false,
5035 true,
5036 false,
5037 Default::default(),
5038 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5039 None,
5040 )
5041 .unwrap(),
5042 cx
5043 )
5044 .await
5045 .unwrap(),
5046 HashMap::from_iter([
5047 (separator!("dir/one.ts").to_string(), vec![14..18]),
5048 (separator!("dir/two.ts").to_string(), vec![14..18]),
5049 ]),
5050 "Rust exclusion search should give only TypeScript files"
5051 );
5052
5053 assert_eq!(
5054 search(
5055 &project,
5056 SearchQuery::text(
5057 search_query,
5058 false,
5059 true,
5060 false,
5061 Default::default(),
5062 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5063 None,
5064 )
5065 .unwrap(),
5066 cx
5067 )
5068 .await
5069 .unwrap(),
5070 HashMap::from_iter([
5071 (separator!("dir/one.rs").to_string(), vec![8..12]),
5072 (separator!("dir/two.rs").to_string(), vec![8..12]),
5073 ]),
5074 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5075 );
5076
5077 assert!(
5078 search(
5079 &project,
5080 SearchQuery::text(
5081 search_query,
5082 false,
5083 true,
5084 false,
5085 Default::default(),
5086 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5087 .unwrap(),
5088 None,
5089 )
5090 .unwrap(),
5091 cx
5092 )
5093 .await
5094 .unwrap()
5095 .is_empty(),
5096 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5097 );
5098}
5099
5100#[gpui::test]
5101async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5102 init_test(cx);
5103
5104 let search_query = "file";
5105
5106 let fs = FakeFs::new(cx.executor());
5107 fs.insert_tree(
5108 path!("/dir"),
5109 json!({
5110 "one.rs": r#"// Rust file one"#,
5111 "one.ts": r#"// TypeScript file one"#,
5112 "two.rs": r#"// Rust file two"#,
5113 "two.ts": r#"// TypeScript file two"#,
5114 }),
5115 )
5116 .await;
5117 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5118
5119 assert!(
5120 search(
5121 &project,
5122 SearchQuery::text(
5123 search_query,
5124 false,
5125 true,
5126 false,
5127 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5128 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5129 None,
5130 )
5131 .unwrap(),
5132 cx
5133 )
5134 .await
5135 .unwrap()
5136 .is_empty(),
5137 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5138 );
5139
5140 assert!(
5141 search(
5142 &project,
5143 SearchQuery::text(
5144 search_query,
5145 false,
5146 true,
5147 false,
5148 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5149 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5150 None,
5151 )
5152 .unwrap(),
5153 cx
5154 )
5155 .await
5156 .unwrap()
5157 .is_empty(),
5158 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5159 );
5160
5161 assert!(
5162 search(
5163 &project,
5164 SearchQuery::text(
5165 search_query,
5166 false,
5167 true,
5168 false,
5169 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5170 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5171 None,
5172 )
5173 .unwrap(),
5174 cx
5175 )
5176 .await
5177 .unwrap()
5178 .is_empty(),
5179 "Non-matching inclusions and exclusions should not change that."
5180 );
5181
5182 assert_eq!(
5183 search(
5184 &project,
5185 SearchQuery::text(
5186 search_query,
5187 false,
5188 true,
5189 false,
5190 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5191 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5192 None,
5193 )
5194 .unwrap(),
5195 cx
5196 )
5197 .await
5198 .unwrap(),
5199 HashMap::from_iter([
5200 (separator!("dir/one.ts").to_string(), vec![14..18]),
5201 (separator!("dir/two.ts").to_string(), vec![14..18]),
5202 ]),
5203 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5204 );
5205}
5206
5207#[gpui::test]
5208async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
5209 init_test(cx);
5210
5211 let fs = FakeFs::new(cx.executor());
5212 fs.insert_tree(
5213 path!("/worktree-a"),
5214 json!({
5215 "haystack.rs": r#"// NEEDLE"#,
5216 "haystack.ts": r#"// NEEDLE"#,
5217 }),
5218 )
5219 .await;
5220 fs.insert_tree(
5221 path!("/worktree-b"),
5222 json!({
5223 "haystack.rs": r#"// NEEDLE"#,
5224 "haystack.ts": r#"// NEEDLE"#,
5225 }),
5226 )
5227 .await;
5228
5229 let project = Project::test(
5230 fs.clone(),
5231 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
5232 cx,
5233 )
5234 .await;
5235
5236 assert_eq!(
5237 search(
5238 &project,
5239 SearchQuery::text(
5240 "NEEDLE",
5241 false,
5242 true,
5243 false,
5244 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
5245 Default::default(),
5246 None,
5247 )
5248 .unwrap(),
5249 cx
5250 )
5251 .await
5252 .unwrap(),
5253 HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
5254 "should only return results from included worktree"
5255 );
5256 assert_eq!(
5257 search(
5258 &project,
5259 SearchQuery::text(
5260 "NEEDLE",
5261 false,
5262 true,
5263 false,
5264 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
5265 Default::default(),
5266 None,
5267 )
5268 .unwrap(),
5269 cx
5270 )
5271 .await
5272 .unwrap(),
5273 HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
5274 "should only return results from included worktree"
5275 );
5276
5277 assert_eq!(
5278 search(
5279 &project,
5280 SearchQuery::text(
5281 "NEEDLE",
5282 false,
5283 true,
5284 false,
5285 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5286 Default::default(),
5287 None,
5288 )
5289 .unwrap(),
5290 cx
5291 )
5292 .await
5293 .unwrap(),
5294 HashMap::from_iter([
5295 (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
5296 (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
5297 ]),
5298 "should return results from both worktrees"
5299 );
5300}
5301
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Text search should skip gitignored entries (`target/`, `node_modules/`)
    // by default, include them when the include-ignored flag is set, and
    // still honor include/exclude matchers inside ignored directories.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search: ignored directories are not scanned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is created for each subsequent query —
    // presumably to avoid reusing worktree scan state from the previous
    // search; confirm whether reusing `project` would also pass.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Same query with the include-ignored flag set: every file matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include-ignored search narrowed by both an inclusion (prettier dir)
    // and an exclusion (TS files) matcher.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5421
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // `create_entry` should accept unusual-but-valid names (trailing dots),
    // and reject any path that escapes the worktree or contains `..`
    // components; `open_buffer` must apply the same `..` restriction.
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    // NOTE(review): this insert_tree uses a raw "/one/two" path while the
    // assertions below use path!(); confirm this is intentional and behaves
    // the same on Windows.
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The project root is the nested "three" directory, so "../.." escapes it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // A name ending in ".." dots is still a legal file name.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only the "b.." entry was created; the rejected paths left no trace.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
5491
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // When several language servers attach to one buffer, `Project::hover`
    // must query every server that advertises hover capability, skip the one
    // that does not, and aggregate only the non-empty responses.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four servers: the first three advertise hover support (ESLint will
    // answer with None), the fourth advertises no hover capability at all.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Await each server's startup and install a hover handler appropriate to
    // its role, keyed by server name so we can later join on the handlers
    // that are expected to fire.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two return real hover content tagged with their name.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Queried (it has the capability) but contributes no content.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Must never be asked for hovers; the handler panics if it is.
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue the hover, then wait for every capable server to receive its
    // request before collecting the aggregated result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5645
5646#[gpui::test]
5647async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5648 init_test(cx);
5649
5650 let fs = FakeFs::new(cx.executor());
5651 fs.insert_tree(
5652 path!("/dir"),
5653 json!({
5654 "a.ts": "a",
5655 }),
5656 )
5657 .await;
5658
5659 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5660
5661 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5662 language_registry.add(typescript_lang());
5663 let mut fake_language_servers = language_registry.register_fake_lsp(
5664 "TypeScript",
5665 FakeLspAdapter {
5666 capabilities: lsp::ServerCapabilities {
5667 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5668 ..lsp::ServerCapabilities::default()
5669 },
5670 ..FakeLspAdapter::default()
5671 },
5672 );
5673
5674 let (buffer, _handle) = project
5675 .update(cx, |p, cx| {
5676 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5677 })
5678 .await
5679 .unwrap();
5680 cx.executor().run_until_parked();
5681
5682 let fake_server = fake_language_servers
5683 .next()
5684 .await
5685 .expect("failed to get the language server");
5686
5687 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5688 move |_, _| async move {
5689 Ok(Some(lsp::Hover {
5690 contents: lsp::HoverContents::Array(vec![
5691 lsp::MarkedString::String("".to_string()),
5692 lsp::MarkedString::String(" ".to_string()),
5693 lsp::MarkedString::String("\n\n\n".to_string()),
5694 ]),
5695 range: None,
5696 }))
5697 },
5698 );
5699
5700 let hover_task = project.update(cx, |project, cx| {
5701 project.hover(&buffer, Point::new(0, 0), cx)
5702 });
5703 let () = request_handled
5704 .next()
5705 .await
5706 .expect("All hover requests should have been triggered");
5707 assert_eq!(
5708 Vec::<String>::new(),
5709 hover_task
5710 .await
5711 .into_iter()
5712 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5713 .sorted()
5714 .collect::<Vec<_>>(),
5715 "Empty hover parts should be ignored"
5716 );
5717}
5718
5719#[gpui::test]
5720async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5721 init_test(cx);
5722
5723 let fs = FakeFs::new(cx.executor());
5724 fs.insert_tree(
5725 path!("/dir"),
5726 json!({
5727 "a.ts": "a",
5728 }),
5729 )
5730 .await;
5731
5732 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5733
5734 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5735 language_registry.add(typescript_lang());
5736 let mut fake_language_servers = language_registry.register_fake_lsp(
5737 "TypeScript",
5738 FakeLspAdapter {
5739 capabilities: lsp::ServerCapabilities {
5740 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5741 ..lsp::ServerCapabilities::default()
5742 },
5743 ..FakeLspAdapter::default()
5744 },
5745 );
5746
5747 let (buffer, _handle) = project
5748 .update(cx, |p, cx| {
5749 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5750 })
5751 .await
5752 .unwrap();
5753 cx.executor().run_until_parked();
5754
5755 let fake_server = fake_language_servers
5756 .next()
5757 .await
5758 .expect("failed to get the language server");
5759
5760 let mut request_handled = fake_server
5761 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
5762 Ok(Some(vec![
5763 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5764 title: "organize imports".to_string(),
5765 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5766 ..lsp::CodeAction::default()
5767 }),
5768 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5769 title: "fix code".to_string(),
5770 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5771 ..lsp::CodeAction::default()
5772 }),
5773 ]))
5774 });
5775
5776 let code_actions_task = project.update(cx, |project, cx| {
5777 project.code_actions(
5778 &buffer,
5779 0..buffer.read(cx).len(),
5780 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5781 cx,
5782 )
5783 });
5784
5785 let () = request_handled
5786 .next()
5787 .await
5788 .expect("The code action request should have been triggered");
5789
5790 let code_actions = code_actions_task.await.unwrap();
5791 assert_eq!(code_actions.len(), 1);
5792 assert_eq!(
5793 code_actions[0].lsp_action.action_kind(),
5794 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5795 );
5796}
5797
5798#[gpui::test]
5799async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5800 init_test(cx);
5801
5802 let fs = FakeFs::new(cx.executor());
5803 fs.insert_tree(
5804 path!("/dir"),
5805 json!({
5806 "a.tsx": "a",
5807 }),
5808 )
5809 .await;
5810
5811 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5812
5813 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5814 language_registry.add(tsx_lang());
5815 let language_server_names = [
5816 "TypeScriptServer",
5817 "TailwindServer",
5818 "ESLintServer",
5819 "NoActionsCapabilitiesServer",
5820 ];
5821
5822 let mut language_server_rxs = [
5823 language_registry.register_fake_lsp(
5824 "tsx",
5825 FakeLspAdapter {
5826 name: language_server_names[0],
5827 capabilities: lsp::ServerCapabilities {
5828 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5829 ..lsp::ServerCapabilities::default()
5830 },
5831 ..FakeLspAdapter::default()
5832 },
5833 ),
5834 language_registry.register_fake_lsp(
5835 "tsx",
5836 FakeLspAdapter {
5837 name: language_server_names[1],
5838 capabilities: lsp::ServerCapabilities {
5839 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5840 ..lsp::ServerCapabilities::default()
5841 },
5842 ..FakeLspAdapter::default()
5843 },
5844 ),
5845 language_registry.register_fake_lsp(
5846 "tsx",
5847 FakeLspAdapter {
5848 name: language_server_names[2],
5849 capabilities: lsp::ServerCapabilities {
5850 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5851 ..lsp::ServerCapabilities::default()
5852 },
5853 ..FakeLspAdapter::default()
5854 },
5855 ),
5856 language_registry.register_fake_lsp(
5857 "tsx",
5858 FakeLspAdapter {
5859 name: language_server_names[3],
5860 capabilities: lsp::ServerCapabilities {
5861 code_action_provider: None,
5862 ..lsp::ServerCapabilities::default()
5863 },
5864 ..FakeLspAdapter::default()
5865 },
5866 ),
5867 ];
5868
5869 let (buffer, _handle) = project
5870 .update(cx, |p, cx| {
5871 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5872 })
5873 .await
5874 .unwrap();
5875 cx.executor().run_until_parked();
5876
5877 let mut servers_with_actions_requests = HashMap::default();
5878 for i in 0..language_server_names.len() {
5879 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5880 panic!(
5881 "Failed to get language server #{i} with name {}",
5882 &language_server_names[i]
5883 )
5884 });
5885 let new_server_name = new_server.server.name();
5886
5887 assert!(
5888 !servers_with_actions_requests.contains_key(&new_server_name),
5889 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5890 );
5891 match new_server_name.0.as_ref() {
5892 "TailwindServer" | "TypeScriptServer" => {
5893 servers_with_actions_requests.insert(
5894 new_server_name.clone(),
5895 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5896 move |_, _| {
5897 let name = new_server_name.clone();
5898 async move {
5899 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5900 lsp::CodeAction {
5901 title: format!("{name} code action"),
5902 ..lsp::CodeAction::default()
5903 },
5904 )]))
5905 }
5906 },
5907 ),
5908 );
5909 }
5910 "ESLintServer" => {
5911 servers_with_actions_requests.insert(
5912 new_server_name,
5913 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5914 |_, _| async move { Ok(None) },
5915 ),
5916 );
5917 }
5918 "NoActionsCapabilitiesServer" => {
5919 let _never_handled = new_server
5920 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5921 panic!(
5922 "Should not call for code actions server with no corresponding capabilities"
5923 )
5924 });
5925 }
5926 unexpected => panic!("Unexpected server name: {unexpected}"),
5927 }
5928 }
5929
5930 let code_actions_task = project.update(cx, |project, cx| {
5931 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5932 });
5933
5934 // cx.run_until_parked();
5935 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5936 |mut code_actions_request| async move {
5937 code_actions_request
5938 .next()
5939 .await
5940 .expect("All code actions requests should have been triggered")
5941 },
5942 ))
5943 .await;
5944 assert_eq!(
5945 vec!["TailwindServer code action", "TypeScriptServer code action"],
5946 code_actions_task
5947 .await
5948 .unwrap()
5949 .into_iter()
5950 .map(|code_action| code_action.lsp_action.title().to_owned())
5951 .sorted()
5952 .collect::<Vec<_>>(),
5953 "Should receive code actions responses from all related servers with hover capabilities"
5954 );
5955}
5956
5957#[gpui::test]
5958async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5959 init_test(cx);
5960
5961 let fs = FakeFs::new(cx.executor());
5962 fs.insert_tree(
5963 "/dir",
5964 json!({
5965 "a.rs": "let a = 1;",
5966 "b.rs": "let b = 2;",
5967 "c.rs": "let c = 2;",
5968 }),
5969 )
5970 .await;
5971
5972 let project = Project::test(
5973 fs,
5974 [
5975 "/dir/a.rs".as_ref(),
5976 "/dir/b.rs".as_ref(),
5977 "/dir/c.rs".as_ref(),
5978 ],
5979 cx,
5980 )
5981 .await;
5982
5983 // check the initial state and get the worktrees
5984 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5985 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5986 assert_eq!(worktrees.len(), 3);
5987
5988 let worktree_a = worktrees[0].read(cx);
5989 let worktree_b = worktrees[1].read(cx);
5990 let worktree_c = worktrees[2].read(cx);
5991
5992 // check they start in the right order
5993 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5994 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5995 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5996
5997 (
5998 worktrees[0].clone(),
5999 worktrees[1].clone(),
6000 worktrees[2].clone(),
6001 )
6002 });
6003
6004 // move first worktree to after the second
6005 // [a, b, c] -> [b, a, c]
6006 project
6007 .update(cx, |project, cx| {
6008 let first = worktree_a.read(cx);
6009 let second = worktree_b.read(cx);
6010 project.move_worktree(first.id(), second.id(), cx)
6011 })
6012 .expect("moving first after second");
6013
6014 // check the state after moving
6015 project.update(cx, |project, cx| {
6016 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6017 assert_eq!(worktrees.len(), 3);
6018
6019 let first = worktrees[0].read(cx);
6020 let second = worktrees[1].read(cx);
6021 let third = worktrees[2].read(cx);
6022
6023 // check they are now in the right order
6024 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6025 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6026 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6027 });
6028
6029 // move the second worktree to before the first
6030 // [b, a, c] -> [a, b, c]
6031 project
6032 .update(cx, |project, cx| {
6033 let second = worktree_a.read(cx);
6034 let first = worktree_b.read(cx);
6035 project.move_worktree(first.id(), second.id(), cx)
6036 })
6037 .expect("moving second before first");
6038
6039 // check the state after moving
6040 project.update(cx, |project, cx| {
6041 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6042 assert_eq!(worktrees.len(), 3);
6043
6044 let first = worktrees[0].read(cx);
6045 let second = worktrees[1].read(cx);
6046 let third = worktrees[2].read(cx);
6047
6048 // check they are now in the right order
6049 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6050 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6051 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6052 });
6053
6054 // move the second worktree to after the third
6055 // [a, b, c] -> [a, c, b]
6056 project
6057 .update(cx, |project, cx| {
6058 let second = worktree_b.read(cx);
6059 let third = worktree_c.read(cx);
6060 project.move_worktree(second.id(), third.id(), cx)
6061 })
6062 .expect("moving second after third");
6063
6064 // check the state after moving
6065 project.update(cx, |project, cx| {
6066 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6067 assert_eq!(worktrees.len(), 3);
6068
6069 let first = worktrees[0].read(cx);
6070 let second = worktrees[1].read(cx);
6071 let third = worktrees[2].read(cx);
6072
6073 // check they are now in the right order
6074 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6075 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6076 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6077 });
6078
6079 // move the third worktree to before the second
6080 // [a, c, b] -> [a, b, c]
6081 project
6082 .update(cx, |project, cx| {
6083 let third = worktree_c.read(cx);
6084 let second = worktree_b.read(cx);
6085 project.move_worktree(third.id(), second.id(), cx)
6086 })
6087 .expect("moving third before second");
6088
6089 // check the state after moving
6090 project.update(cx, |project, cx| {
6091 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6092 assert_eq!(worktrees.len(), 3);
6093
6094 let first = worktrees[0].read(cx);
6095 let second = worktrees[1].read(cx);
6096 let third = worktrees[2].read(cx);
6097
6098 // check they are now in the right order
6099 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6100 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6101 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6102 });
6103
6104 // move the first worktree to after the third
6105 // [a, b, c] -> [b, c, a]
6106 project
6107 .update(cx, |project, cx| {
6108 let first = worktree_a.read(cx);
6109 let third = worktree_c.read(cx);
6110 project.move_worktree(first.id(), third.id(), cx)
6111 })
6112 .expect("moving first after third");
6113
6114 // check the state after moving
6115 project.update(cx, |project, cx| {
6116 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6117 assert_eq!(worktrees.len(), 3);
6118
6119 let first = worktrees[0].read(cx);
6120 let second = worktrees[1].read(cx);
6121 let third = worktrees[2].read(cx);
6122
6123 // check they are now in the right order
6124 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6125 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6126 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6127 });
6128
6129 // move the third worktree to before the first
6130 // [b, c, a] -> [a, b, c]
6131 project
6132 .update(cx, |project, cx| {
6133 let third = worktree_a.read(cx);
6134 let first = worktree_b.read(cx);
6135 project.move_worktree(third.id(), first.id(), cx)
6136 })
6137 .expect("moving third before first");
6138
6139 // check the state after moving
6140 project.update(cx, |project, cx| {
6141 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6142 assert_eq!(worktrees.len(), 3);
6143
6144 let first = worktrees[0].read(cx);
6145 let second = worktrees[1].read(cx);
6146 let third = worktrees[2].read(cx);
6147
6148 // check they are now in the right order
6149 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6150 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6151 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6152 });
6153}
6154
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an unstaged diff (working copy vs. git index) reports
    // the expected hunks, and recomputes when the index contents change.
    init_test(cx);

    // Contents of the file as recorded in the git index.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Contents of the file on disk (and therefore in the buffer).
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The diff against the index shows one added and one modified hunk.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Replace the index text so it matches the buffer except for one
    // missing line.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    // The diff updates against the new base text: a single added hunk.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6252
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an uncommitted diff (working copy vs. HEAD) tracks both
    // HEAD and index changes, including the case of a file deleted on disk
    // but still present in HEAD.
    init_test(cx);

    // Contents at HEAD.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Contents in the index.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Contents on disk; note `deletion.rs` exists only in HEAD/index.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the registered language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment line is only in the buffer (unstaged => secondary
    // hunk present); the println change is already staged.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file appears as a single deleted hunk; the deletion is not
    // yet staged, so a secondary (unstaged) hunk is present.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deleted hunk remains, but its secondary (unstaged) hunk is gone.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6430
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    // Verifies the hunk staging lifecycle: optimistic "pending" state while
    // the index write is in flight, transition to staged on success,
    // rollback on index-write failure, and the diff-change events emitted
    // along the way.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // The working copy deletes "zero" and upcases "two" and "four",
    // producing three distinct hunks.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so every hunk is unstaged.
    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    // Pending: the index write has not completed yet.
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    // Optimistically pending even though the write will fail.
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6770
6771#[gpui::test(seeds(340, 472))]
6772async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
6773 use DiffHunkSecondaryStatus::*;
6774 init_test(cx);
6775
6776 let committed_contents = r#"
6777 zero
6778 one
6779 two
6780 three
6781 four
6782 five
6783 "#
6784 .unindent();
6785 let file_contents = r#"
6786 one
6787 TWO
6788 three
6789 FOUR
6790 five
6791 "#
6792 .unindent();
6793
6794 let fs = FakeFs::new(cx.background_executor.clone());
6795 fs.insert_tree(
6796 "/dir",
6797 json!({
6798 ".git": {},
6799 "file.txt": file_contents.clone()
6800 }),
6801 )
6802 .await;
6803
6804 fs.set_head_for_repo(
6805 "/dir/.git".as_ref(),
6806 &[("file.txt".into(), committed_contents.clone())],
6807 );
6808 fs.set_index_for_repo(
6809 "/dir/.git".as_ref(),
6810 &[("file.txt".into(), committed_contents.clone())],
6811 );
6812
6813 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6814
6815 let buffer = project
6816 .update(cx, |project, cx| {
6817 project.open_local_buffer("/dir/file.txt", cx)
6818 })
6819 .await
6820 .unwrap();
6821 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6822 let uncommitted_diff = project
6823 .update(cx, |project, cx| {
6824 project.open_uncommitted_diff(buffer.clone(), cx)
6825 })
6826 .await
6827 .unwrap();
6828
6829 // The hunks are initially unstaged.
6830 uncommitted_diff.read_with(cx, |diff, cx| {
6831 assert_hunks(
6832 diff.hunks(&snapshot, cx),
6833 &snapshot,
6834 &diff.base_text_string().unwrap(),
6835 &[
6836 (
6837 0..0,
6838 "zero\n",
6839 "",
6840 DiffHunkStatus::deleted(HasSecondaryHunk),
6841 ),
6842 (
6843 1..2,
6844 "two\n",
6845 "TWO\n",
6846 DiffHunkStatus::modified(HasSecondaryHunk),
6847 ),
6848 (
6849 3..4,
6850 "four\n",
6851 "FOUR\n",
6852 DiffHunkStatus::modified(HasSecondaryHunk),
6853 ),
6854 ],
6855 );
6856 });
6857
6858 // Pause IO events
6859 fs.pause_events();
6860
6861 // Stage the first hunk.
6862 uncommitted_diff.update(cx, |diff, cx| {
6863 let hunk = diff.hunks(&snapshot, cx).next().unwrap();
6864 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
6865 assert_hunks(
6866 diff.hunks(&snapshot, cx),
6867 &snapshot,
6868 &diff.base_text_string().unwrap(),
6869 &[
6870 (
6871 0..0,
6872 "zero\n",
6873 "",
6874 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
6875 ),
6876 (
6877 1..2,
6878 "two\n",
6879 "TWO\n",
6880 DiffHunkStatus::modified(HasSecondaryHunk),
6881 ),
6882 (
6883 3..4,
6884 "four\n",
6885 "FOUR\n",
6886 DiffHunkStatus::modified(HasSecondaryHunk),
6887 ),
6888 ],
6889 );
6890 });
6891
6892 // Stage the second hunk *before* receiving the FS event for the first hunk.
6893 cx.run_until_parked();
6894 uncommitted_diff.update(cx, |diff, cx| {
6895 let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
6896 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
6897 assert_hunks(
6898 diff.hunks(&snapshot, cx),
6899 &snapshot,
6900 &diff.base_text_string().unwrap(),
6901 &[
6902 (
6903 0..0,
6904 "zero\n",
6905 "",
6906 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
6907 ),
6908 (
6909 1..2,
6910 "two\n",
6911 "TWO\n",
6912 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
6913 ),
6914 (
6915 3..4,
6916 "four\n",
6917 "FOUR\n",
6918 DiffHunkStatus::modified(HasSecondaryHunk),
6919 ),
6920 ],
6921 );
6922 });
6923
6924 // Process the FS event for staging the first hunk (second event is still pending).
6925 fs.flush_events(1);
6926 cx.run_until_parked();
6927
6928 // Stage the third hunk before receiving the second FS event.
6929 uncommitted_diff.update(cx, |diff, cx| {
6930 let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
6931 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
6932 });
6933
6934 // Wait for all remaining IO.
6935 cx.run_until_parked();
6936 fs.flush_events(fs.buffered_event_count());
6937
6938 // Now all hunks are staged.
6939 cx.run_until_parked();
6940 uncommitted_diff.update(cx, |diff, cx| {
6941 assert_hunks(
6942 diff.hunks(&snapshot, cx),
6943 &snapshot,
6944 &diff.base_text_string().unwrap(),
6945 &[
6946 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
6947 (
6948 1..2,
6949 "two\n",
6950 "TWO\n",
6951 DiffHunkStatus::modified(NoSecondaryHunk),
6952 ),
6953 (
6954 3..4,
6955 "four\n",
6956 "FOUR\n",
6957 DiffHunkStatus::modified(NoSecondaryHunk),
6958 ),
6959 ],
6960 );
6961 });
6962}
6963
#[gpui::test]
async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Build a 500-line file in which every 5th line differs from the
    // committed version, producing 100 single-line modified hunks.
    let different_lines = (0..500)
        .step_by(5)
        .map(|i| format!("diff {}\n", i))
        .collect::<Vec<String>>();
    let committed_contents = (0..500).map(|i| format!("{}\n", i)).collect::<String>();
    let file_contents = (0..500)
        .map(|i| {
            if i % 5 == 0 {
                different_lines[i / 5].clone()
            } else {
                format!("{}\n", i)
            }
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index start out identical, so every hunk is unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // One expected hunk per differing line: (row range, old text, new text, status).
    let mut expected_hunks: Vec<(Range<u32>, String, String, DiffHunkStatus)> = (0..500)
        .step_by(5)
        .map(|i| {
            (
                i as u32..i as u32 + 1,
                format!("{}\n", i),
                different_lines[i / 5].clone(),
                DiffHunkStatus::modified(HasSecondaryHunk),
            )
        })
        .collect();

    // The hunks are initially unstaged
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // Staging is asynchronous: immediately after the staging calls, every
    // hunk should be in the transient "removal pending" secondary state.
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkRemovalPending);
    }

    // Stage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        }

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(NoSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // Unstaging mirrors staging: transiently "addition pending" ...
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkAdditionPending);
    }

    // Unstage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(false, &[hunk], &snapshot, true, cx);
        }

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks, again
    cx.run_until_parked();
    // ... and then settles back to fully unstaged.
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(HasSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });
}
7109
7110#[gpui::test]
7111async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7112 init_test(cx);
7113
7114 let committed_contents = r#"
7115 fn main() {
7116 println!("hello from HEAD");
7117 }
7118 "#
7119 .unindent();
7120 let file_contents = r#"
7121 fn main() {
7122 println!("hello from the working copy");
7123 }
7124 "#
7125 .unindent();
7126
7127 let fs = FakeFs::new(cx.background_executor.clone());
7128 fs.insert_tree(
7129 "/dir",
7130 json!({
7131 ".git": {},
7132 "src": {
7133 "main.rs": file_contents,
7134 }
7135 }),
7136 )
7137 .await;
7138
7139 fs.set_head_for_repo(
7140 Path::new("/dir/.git"),
7141 &[("src/main.rs".into(), committed_contents.clone())],
7142 );
7143 fs.set_index_for_repo(
7144 Path::new("/dir/.git"),
7145 &[("src/main.rs".into(), committed_contents.clone())],
7146 );
7147
7148 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7149
7150 let buffer = project
7151 .update(cx, |project, cx| {
7152 project.open_local_buffer("/dir/src/main.rs", cx)
7153 })
7154 .await
7155 .unwrap();
7156 let uncommitted_diff = project
7157 .update(cx, |project, cx| {
7158 project.open_uncommitted_diff(buffer.clone(), cx)
7159 })
7160 .await
7161 .unwrap();
7162
7163 cx.run_until_parked();
7164 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7165 let snapshot = buffer.read(cx).snapshot();
7166 assert_hunks(
7167 uncommitted_diff.hunks(&snapshot, cx),
7168 &snapshot,
7169 &uncommitted_diff.base_text_string().unwrap(),
7170 &[(
7171 1..2,
7172 " println!(\"hello from HEAD\");\n",
7173 " println!(\"hello from the working copy\");\n",
7174 DiffHunkStatus {
7175 kind: DiffHunkStatusKind::Modified,
7176 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7177 },
7178 )],
7179 );
7180 });
7181}
7182
7183#[gpui::test]
7184async fn test_repository_and_path_for_project_path(
7185 background_executor: BackgroundExecutor,
7186 cx: &mut gpui::TestAppContext,
7187) {
7188 init_test(cx);
7189 let fs = FakeFs::new(background_executor);
7190 fs.insert_tree(
7191 path!("/root"),
7192 json!({
7193 "c.txt": "",
7194 "dir1": {
7195 ".git": {},
7196 "deps": {
7197 "dep1": {
7198 ".git": {},
7199 "src": {
7200 "a.txt": ""
7201 }
7202 }
7203 },
7204 "src": {
7205 "b.txt": ""
7206 }
7207 },
7208 }),
7209 )
7210 .await;
7211
7212 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7213 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7214 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7215 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7216 .await;
7217 tree.flush_fs_events(cx).await;
7218
7219 project.read_with(cx, |project, cx| {
7220 let git_store = project.git_store().read(cx);
7221 let pairs = [
7222 ("c.txt", None),
7223 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
7224 (
7225 "dir1/deps/dep1/src/a.txt",
7226 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
7227 ),
7228 ];
7229 let expected = pairs
7230 .iter()
7231 .map(|(path, result)| {
7232 (
7233 path,
7234 result.map(|(repo, repo_path)| {
7235 (Path::new(repo).into(), RepoPath::from(repo_path))
7236 }),
7237 )
7238 })
7239 .collect::<Vec<_>>();
7240 let actual = pairs
7241 .iter()
7242 .map(|(path, _)| {
7243 let project_path = (tree_id, Path::new(path)).into();
7244 let result = maybe!({
7245 let (repo, repo_path) =
7246 git_store.repository_and_path_for_project_path(&project_path, cx)?;
7247 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
7248 });
7249 (path, result)
7250 })
7251 .collect::<Vec<_>>();
7252 pretty_assertions::assert_eq!(expected, actual);
7253 });
7254
7255 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
7256 .await
7257 .unwrap();
7258 tree.flush_fs_events(cx).await;
7259
7260 project.read_with(cx, |project, cx| {
7261 let git_store = project.git_store().read(cx);
7262 assert_eq!(
7263 git_store.repository_and_path_for_project_path(
7264 &(tree_id, Path::new("dir1/src/b.txt")).into(),
7265 cx
7266 ),
7267 None
7268 );
7269 });
7270}
7271
7272#[gpui::test]
7273async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7274 init_test(cx);
7275 let fs = FakeFs::new(cx.background_executor.clone());
7276 fs.insert_tree(
7277 path!("/root"),
7278 json!({
7279 "home": {
7280 ".git": {},
7281 "project": {
7282 "a.txt": "A"
7283 },
7284 },
7285 }),
7286 )
7287 .await;
7288 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7289
7290 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7291 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7292 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7293 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7294 .await;
7295 tree.flush_fs_events(cx).await;
7296
7297 project.read_with(cx, |project, cx| {
7298 let containing = project
7299 .git_store()
7300 .read(cx)
7301 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7302 assert!(containing.is_none());
7303 });
7304
7305 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7306 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7307 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7308 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7309 .await;
7310 tree.flush_fs_events(cx).await;
7311
7312 project.read_with(cx, |project, cx| {
7313 let containing = project
7314 .git_store()
7315 .read(cx)
7316 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7317 assert_eq!(
7318 containing
7319 .unwrap()
7320 .0
7321 .read(cx)
7322 .work_directory_abs_path
7323 .as_ref(),
7324 Path::new(path!("/root/home"))
7325 );
7326 });
7327}
7328
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        // c.txt is unchanged, so it produces no status entry at all.
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify the previously-clean tracked file; it should gain a
    // worktree-modified status.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Stage + commit the modified files and drop d.txt from the index, then
    // delete two files from the working copy.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7454
7455#[gpui::test]
7456async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
7457 init_test(cx);
7458 cx.executor().allow_parking();
7459
7460 let root = TempTree::new(json!({
7461 "project": {
7462 "sub": {},
7463 "a.txt": "",
7464 },
7465 }));
7466
7467 let work_dir = root.path().join("project");
7468 let repo = git_init(work_dir.as_path());
7469 // a.txt exists in HEAD and the working copy but is deleted in the index.
7470 git_add("a.txt", &repo);
7471 git_commit("Initial commit", &repo);
7472 git_remove_index("a.txt".as_ref(), &repo);
7473 // `sub` is a nested git repository.
7474 let _sub = git_init(&work_dir.join("sub"));
7475
7476 let project = Project::test(
7477 Arc::new(RealFs::new(None, cx.executor())),
7478 [root.path()],
7479 cx,
7480 )
7481 .await;
7482
7483 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7484 tree.flush_fs_events(cx).await;
7485 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7486 .await;
7487 cx.executor().run_until_parked();
7488
7489 let repository = project.read_with(cx, |project, cx| {
7490 project
7491 .repositories(cx)
7492 .values()
7493 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
7494 .unwrap()
7495 .clone()
7496 });
7497
7498 repository.read_with(cx, |repository, _cx| {
7499 let entries = repository.cached_status().collect::<Vec<_>>();
7500
7501 // `sub` doesn't appear in our computed statuses.
7502 // a.txt appears with a combined `DA` status.
7503 assert_eq!(
7504 entries,
7505 [StatusEntry {
7506 repo_path: "a.txt".into(),
7507 status: TrackedStatus {
7508 index_status: StatusCode::Deleted,
7509 worktree_status: StatusCode::Added
7510 }
7511 .into(),
7512 }]
7513 )
7514 });
7515}
7516
#[gpui::test]
async fn test_repository_subfolder_git_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "my-repo": {
            // .git folder will go here
            "a.txt": "a",
            "sub-folder-1": {
                "sub-folder-2": {
                    "c.txt": "cc",
                    "d": {
                        "e.txt": "eee"
                    }
                },
            }
        },
    }));

    // Paths are relative to the repository root, not the opened worktree.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Set up git repository before creating the worktree.
    let git_repo_work_dir = root.path().join("my-repo");
    let repo = git_init(git_repo_work_dir.as_path());
    git_add(C_TXT, &repo);
    git_commit("Initial commit", &repo);

    // Open the worktree in subfolder
    let project_root = Path::new("my-repo/sub-folder-1/sub-folder-2");

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path().join(project_root).as_path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The repository was discovered above the worktree root.
        assert_eq!(
            repository.work_directory_abs_path.canonicalize().unwrap(),
            root.path().join("my-repo").canonicalize().unwrap()
        );

        // c.txt is committed (no status entry); e.txt is untracked.
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            FileStatus::Untracked
        );
    });

    // Now we simulate FS events, but ONLY in the .git folder that's outside
    // of our project root.
    // Meaning: we don't produce any FS events for files inside the project.
    git_add(E_TXT, &repo);
    git_commit("Second commit", &repo);
    tree.flush_fs_events_in_root_git_repository(cx).await;
    cx.executor().run_until_parked();

    // After the commit, both files are clean, so neither has a status entry.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(repository.status_for_path(&E_TXT.into()), None);
    });
}
7593
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create divergent edits of a.txt on two branches so that the
    // cherry-pick below conflicts.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    // Cherry-pick the side-branch commit; this must leave a conflict marker
    // state (CHERRY_PICK_HEAD) and a CONFLICTED status for a.txt.
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();
    // The project should observe the conflict on a.txt.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick is resolved, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
7674
7675#[gpui::test]
7676async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
7677 init_test(cx);
7678 let fs = FakeFs::new(cx.background_executor.clone());
7679 fs.insert_tree(
7680 path!("/root"),
7681 json!({
7682 ".git": {},
7683 ".gitignore": "*.txt\n",
7684 "a.xml": "<a></a>",
7685 "b.txt": "Some text"
7686 }),
7687 )
7688 .await;
7689
7690 fs.set_head_and_index_for_repo(
7691 path!("/root/.git").as_ref(),
7692 &[
7693 (".gitignore".into(), "*.txt\n".into()),
7694 ("a.xml".into(), "<a></a>".into()),
7695 ],
7696 );
7697
7698 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7699
7700 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7701 tree.flush_fs_events(cx).await;
7702 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7703 .await;
7704 cx.executor().run_until_parked();
7705
7706 let repository = project.read_with(cx, |project, cx| {
7707 project.repositories(cx).values().next().unwrap().clone()
7708 });
7709
7710 // One file is unmodified, the other is ignored.
7711 cx.read(|cx| {
7712 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
7713 assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
7714 });
7715
7716 // Change the gitignore, and stage the newly non-ignored file.
7717 fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
7718 .await
7719 .unwrap();
7720 fs.set_index_for_repo(
7721 Path::new(path!("/root/.git")),
7722 &[
7723 (".gitignore".into(), "*.txt\n".into()),
7724 ("a.xml".into(), "<a></a>".into()),
7725 ("b.txt".into(), "Some text".into()),
7726 ],
7727 );
7728
7729 cx.executor().run_until_parked();
7730 cx.read(|cx| {
7731 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
7732 assert_entry_git_state(
7733 tree.read(cx),
7734 repository.read(cx),
7735 "b.txt",
7736 Some(StatusCode::Added),
7737 false,
7738 );
7739 });
7740}
7741
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory that some program already has open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
7747#[gpui::test]
7748#[cfg_attr(target_os = "windows", ignore)]
7749async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
7750 init_test(cx);
7751 cx.executor().allow_parking();
7752 let root = TempTree::new(json!({
7753 "projects": {
7754 "project1": {
7755 "a": "",
7756 "b": "",
7757 }
7758 },
7759
7760 }));
7761 let root_path = root.path();
7762
7763 let repo = git_init(&root_path.join("projects/project1"));
7764 git_add("a", &repo);
7765 git_commit("init", &repo);
7766 std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();
7767
7768 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
7769
7770 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7771 tree.flush_fs_events(cx).await;
7772 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7773 .await;
7774 cx.executor().run_until_parked();
7775
7776 let repository = project.read_with(cx, |project, cx| {
7777 project.repositories(cx).values().next().unwrap().clone()
7778 });
7779
7780 repository.read_with(cx, |repository, _| {
7781 assert_eq!(
7782 repository.work_directory_abs_path.as_ref(),
7783 root_path.join("projects/project1").as_path()
7784 );
7785 assert_eq!(
7786 repository
7787 .status_for_path(&"a".into())
7788 .map(|entry| entry.status),
7789 Some(StatusCode::Modified.worktree()),
7790 );
7791 assert_eq!(
7792 repository
7793 .status_for_path(&"b".into())
7794 .map(|entry| entry.status),
7795 Some(FileStatus::Untracked),
7796 );
7797 });
7798
7799 std::fs::rename(
7800 root_path.join("projects/project1"),
7801 root_path.join("projects/project2"),
7802 )
7803 .unwrap();
7804 tree.flush_fs_events(cx).await;
7805
7806 repository.read_with(cx, |repository, _| {
7807 assert_eq!(
7808 repository.work_directory_abs_path.as_ref(),
7809 root_path.join("projects/project2").as_path()
7810 );
7811 assert_eq!(
7812 repository.status_for_path(&"a".into()).unwrap().status,
7813 StatusCode::Modified.worktree(),
7814 );
7815 assert_eq!(
7816 repository.status_for_path(&"b".into()).unwrap().status,
7817 FileStatus::Untracked,
7818 );
7819 });
7820}
7821
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory that some program already has open. This is a
// limitation of Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout the assertions below.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so both are untracked.
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        // a.txt and b.txt are now committed, so they have no status entries.
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // After the reset/remove-index/stash sequence, a.txt no longer
        // reports any status, while b.txt is untracked again.
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        // NOTE(review): BUILD_FILE is not asserted here — presumably it stays
        // absent from the status because of the **/target ignore rule.
    });

    // Delete a file and a directory, and commit a gitignore that additionally
    // ignores f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // A newly-created file deep in a new directory shows up as untracked.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming the parent directory keeps the untracked status attached to
    // the file's new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8009
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Only the repository rooted at the visible worktree (`dep1`) should be
    // reported by the project; adding an *invisible* worktree that lives
    // inside the outer `dir1` repository must not surface `dir1`'s repo.
    init_test(cx);
    let fs = FakeFs::new(executor);
    // Nested repositories: `dir1` is a repo that itself contains repo `dep1`.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Open only the inner repo as the project's visible worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    visible_worktree
        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;

    // After the initial scan, exactly one repository (dep1) is known.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Create an invisible (non-`visible`) worktree for a file that belongs to
    // the *outer* repository.
    let (invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    invisible_worktree
        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;

    // The repository list is unchanged: dir1's repo was not added.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8071
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file-scan exclusions so even ignored entries are scanned and can
    // have their git/ignore state asserted below.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // The repo root is /root/tree; /root/.gitignore is an *ancestor* ignore
    // file that lives outside the repository.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    // Seed HEAD and the index so .gitignore and tracked-file1 are committed
    // and unmodified.
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force entries inside the ignored directory to be loaded, since the
    // scanner doesn't descend into ignored dirs by default.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: tracked file is unmodified; ancestor-ignored file is not
    // ignored *within the repo* (the ancestor .gitignore is outside it); the
    // file under ignored-dir is ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new file and stage it, plus new files mirroring the two
    // ignored cases above.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // After rescanning: the staged file shows as Added; the new ignored-dir
    // file is ignored; and .git itself is reported as ignored.
    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8206
8207#[gpui::test]
8208async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
8209 init_test(cx);
8210 let fs = FakeFs::new(cx.background_executor.clone());
8211 fs.insert_tree(
8212 path!("/root"),
8213 json!({
8214 "project": {
8215 ".git": {},
8216 "child1": {
8217 "a.txt": "A",
8218 },
8219 "child2": {
8220 "b.txt": "B",
8221 }
8222 }
8223 }),
8224 )
8225 .await;
8226
8227 let project = Project::test(
8228 fs.clone(),
8229 [
8230 path!("/root/project/child1").as_ref(),
8231 path!("/root/project/child2").as_ref(),
8232 ],
8233 cx,
8234 )
8235 .await;
8236
8237 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8238 tree.flush_fs_events(cx).await;
8239 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
8240 .await;
8241 cx.executor().run_until_parked();
8242
8243 let repos = project.read_with(cx, |project, cx| {
8244 project
8245 .repositories(cx)
8246 .values()
8247 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
8248 .collect::<Vec<_>>()
8249 });
8250 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
8251}
8252
8253async fn search(
8254 project: &Entity<Project>,
8255 query: SearchQuery,
8256 cx: &mut gpui::TestAppContext,
8257) -> Result<HashMap<String, Vec<Range<usize>>>> {
8258 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8259 let mut results = HashMap::default();
8260 while let Ok(search_result) = search_rx.recv().await {
8261 match search_result {
8262 SearchResult::Buffer { buffer, ranges } => {
8263 results.entry(buffer).or_insert(ranges);
8264 }
8265 SearchResult::LimitReached => {}
8266 }
8267 }
8268 Ok(results
8269 .into_iter()
8270 .map(|(buffer, ranges)| {
8271 buffer.update(cx, |buffer, cx| {
8272 let path = buffer
8273 .file()
8274 .unwrap()
8275 .full_path(cx)
8276 .to_string_lossy()
8277 .to_string();
8278 let ranges = ranges
8279 .into_iter()
8280 .map(|range| range.to_offset(buffer))
8281 .collect::<Vec<_>>();
8282 (path, ranges)
8283 })
8284 })
8285 .collect())
8286}
8287
8288pub fn init_test(cx: &mut gpui::TestAppContext) {
8289 if std::env::var("RUST_LOG").is_ok() {
8290 env_logger::try_init().ok();
8291 }
8292
8293 cx.update(|cx| {
8294 let settings_store = SettingsStore::test(cx);
8295 cx.set_global(settings_store);
8296 release_channel::init(SemanticVersion::default(), cx);
8297 language::init(cx);
8298 Project::init_settings(cx);
8299 });
8300}
8301
8302fn json_lang() -> Arc<Language> {
8303 Arc::new(Language::new(
8304 LanguageConfig {
8305 name: "JSON".into(),
8306 matcher: LanguageMatcher {
8307 path_suffixes: vec!["json".to_string()],
8308 ..Default::default()
8309 },
8310 ..Default::default()
8311 },
8312 None,
8313 ))
8314}
8315
8316fn js_lang() -> Arc<Language> {
8317 Arc::new(Language::new(
8318 LanguageConfig {
8319 name: "JavaScript".into(),
8320 matcher: LanguageMatcher {
8321 path_suffixes: vec!["js".to_string()],
8322 ..Default::default()
8323 },
8324 ..Default::default()
8325 },
8326 None,
8327 ))
8328}
8329
8330fn rust_lang() -> Arc<Language> {
8331 Arc::new(Language::new(
8332 LanguageConfig {
8333 name: "Rust".into(),
8334 matcher: LanguageMatcher {
8335 path_suffixes: vec!["rs".to_string()],
8336 ..Default::default()
8337 },
8338 ..Default::default()
8339 },
8340 Some(tree_sitter_rust::LANGUAGE.into()),
8341 ))
8342}
8343
8344fn typescript_lang() -> Arc<Language> {
8345 Arc::new(Language::new(
8346 LanguageConfig {
8347 name: "TypeScript".into(),
8348 matcher: LanguageMatcher {
8349 path_suffixes: vec!["ts".to_string()],
8350 ..Default::default()
8351 },
8352 ..Default::default()
8353 },
8354 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8355 ))
8356}
8357
8358fn tsx_lang() -> Arc<Language> {
8359 Arc::new(Language::new(
8360 LanguageConfig {
8361 name: "tsx".into(),
8362 matcher: LanguageMatcher {
8363 path_suffixes: vec!["tsx".to_string()],
8364 ..Default::default()
8365 },
8366 ..Default::default()
8367 },
8368 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8369 ))
8370}
8371
8372fn get_all_tasks(
8373 project: &Entity<Project>,
8374 task_contexts: &TaskContexts,
8375 cx: &mut App,
8376) -> Vec<(TaskSourceKind, ResolvedTask)> {
8377 let (mut old, new) = project.update(cx, |project, cx| {
8378 project
8379 .task_store
8380 .read(cx)
8381 .task_inventory()
8382 .unwrap()
8383 .read(cx)
8384 .used_and_current_resolved_tasks(task_contexts, cx)
8385 });
8386 old.extend(new);
8387 old
8388}
8389
8390#[track_caller]
8391fn assert_entry_git_state(
8392 tree: &Worktree,
8393 repository: &Repository,
8394 path: &str,
8395 index_status: Option<StatusCode>,
8396 is_ignored: bool,
8397) {
8398 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
8399 let entry = tree
8400 .entry_for_path(path)
8401 .unwrap_or_else(|| panic!("entry {path} not found"));
8402 let status = repository
8403 .status_for_path(&path.into())
8404 .map(|entry| entry.status);
8405 let expected = index_status.map(|index_status| {
8406 TrackedStatus {
8407 index_status,
8408 worktree_status: StatusCode::Unmodified,
8409 }
8410 .into()
8411 });
8412 assert_eq!(
8413 status, expected,
8414 "expected {path} to have git status: {expected:?}"
8415 );
8416 assert_eq!(
8417 entry.is_ignored, is_ignored,
8418 "expected {path} to have is_ignored: {is_ignored}"
8419 );
8420}
8421
8422#[track_caller]
8423fn git_init(path: &Path) -> git2::Repository {
8424 let mut init_opts = RepositoryInitOptions::new();
8425 init_opts.initial_head("main");
8426 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
8427}
8428
8429#[track_caller]
8430fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
8431 let path = path.as_ref();
8432 let mut index = repo.index().expect("Failed to get index");
8433 index.add_path(path).expect("Failed to add file");
8434 index.write().expect("Failed to write index");
8435}
8436
8437#[track_caller]
8438fn git_remove_index(path: &Path, repo: &git2::Repository) {
8439 let mut index = repo.index().expect("Failed to get index");
8440 index.remove_path(path).expect("Failed to add file");
8441 index.write().expect("Failed to write index");
8442}
8443
8444#[track_caller]
8445fn git_commit(msg: &'static str, repo: &git2::Repository) {
8446 use git2::Signature;
8447
8448 let signature = Signature::now("test", "test@zed.dev").unwrap();
8449 let oid = repo.index().unwrap().write_tree().unwrap();
8450 let tree = repo.find_tree(oid).unwrap();
8451 if let Ok(head) = repo.head() {
8452 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
8453
8454 let parent_commit = parent_obj.as_commit().unwrap();
8455
8456 repo.commit(
8457 Some("HEAD"),
8458 &signature,
8459 &signature,
8460 msg,
8461 &tree,
8462 &[parent_commit],
8463 )
8464 .expect("Failed to commit with parent");
8465 } else {
8466 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
8467 .expect("Failed to commit");
8468 }
8469}
8470
// Currently compiled out via `#[cfg(any())]`; kept around for future tests.
// Cherry-picks `commit` onto the current HEAD.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
8476
8477#[track_caller]
8478fn git_stash(repo: &mut git2::Repository) {
8479 use git2::Signature;
8480
8481 let signature = Signature::now("test", "test@zed.dev").unwrap();
8482 repo.stash_save(&signature, "N/A", None)
8483 .expect("Failed to stash");
8484}
8485
8486#[track_caller]
8487fn git_reset(offset: usize, repo: &git2::Repository) {
8488 let head = repo.head().expect("Couldn't get repo head");
8489 let object = head.peel(git2::ObjectType::Commit).unwrap();
8490 let commit = object.as_commit().unwrap();
8491 let new_head = commit
8492 .parents()
8493 .inspect(|parnet| {
8494 parnet.message();
8495 })
8496 .nth(offset)
8497 .expect("Not enough history");
8498 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
8499 .expect("Could not reset");
8500}
8501
// Currently compiled out via `#[cfg(any())]`; kept around for future tests.
// Creates branch `name` at the current HEAD commit (no force).
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head_commit = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    repo.branch(name, &head_commit, false)
        .expect("Failed to commit");
}
8512
// Currently compiled out via `#[cfg(any())]`; kept around for future tests.
// Points HEAD at the ref `name` and checks out its contents.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
8519
// Currently compiled out via `#[cfg(any())]`; kept around for future tests.
// Snapshots libgit2's status list as a path → status-flags map.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| {
            let path = entry.path().unwrap().to_string();
            (path, entry.status())
        })
        .collect()
}