1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind, assert_hunks,
9};
10use fs::FakeFs;
11use futures::{StreamExt, future};
12use git::{
13 repository::RepoPath,
14 status::{StatusCode, TrackedStatus},
15};
16use git2::RepositoryInitOptions;
17use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
18use http_client::Url;
19use language::{
20 Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
21 LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
22 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
23 tree_sitter_rust, tree_sitter_typescript,
24};
25use lsp::{
26 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
27 WillRenameFiles, notification::DidRenameFiles,
28};
29use parking_lot::Mutex;
30use paths::tasks_file;
31use postage::stream::Stream as _;
32use pretty_assertions::{assert_eq, assert_matches};
33use serde_json::json;
34#[cfg(not(windows))]
35use std::os;
36use std::{mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
37use task::{ResolvedTask, TaskContext};
38use unindent::Unindent as _;
39use util::{
40 TryFutureExt as _, assert_set_eq, path,
41 paths::PathMatcher,
42 separator,
43 test::{TempTree, marked_text_offsets},
44 uri,
45};
46use worktree::WorktreeModelHandle as _;
47
48#[gpui::test]
49async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
50 cx.executor().allow_parking();
51
52 let (tx, mut rx) = futures::channel::mpsc::unbounded();
53 let _thread = std::thread::spawn(move || {
54 #[cfg(not(target_os = "windows"))]
55 std::fs::metadata("/tmp").unwrap();
56 #[cfg(target_os = "windows")]
57 std::fs::metadata("C:/Windows").unwrap();
58 std::thread::sleep(Duration::from_millis(1000));
59 tx.unbounded_send(1).unwrap();
60 });
61 rx.next().await.unwrap();
62}
63
64#[gpui::test]
65async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
66 cx.executor().allow_parking();
67
68 let io_task = smol::unblock(move || {
69 println!("sleeping on thread {:?}", std::thread::current().id());
70 std::thread::sleep(Duration::from_millis(10));
71 1
72 });
73
74 let task = cx.foreground_executor().spawn(async move {
75 io_task.await;
76 });
77
78 task.await;
79}
80
81#[cfg(not(windows))]
82#[gpui::test]
83async fn test_symlinks(cx: &mut gpui::TestAppContext) {
84 init_test(cx);
85 cx.executor().allow_parking();
86
87 let dir = TempTree::new(json!({
88 "root": {
89 "apple": "",
90 "banana": {
91 "carrot": {
92 "date": "",
93 "endive": "",
94 }
95 },
96 "fennel": {
97 "grape": "",
98 }
99 }
100 }));
101
102 let root_link_path = dir.path().join("root_link");
103 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
104 os::unix::fs::symlink(
105 dir.path().join("root/fennel"),
106 dir.path().join("root/finnochio"),
107 )
108 .unwrap();
109
110 let project = Project::test(
111 Arc::new(RealFs::new(None, cx.executor())),
112 [root_link_path.as_ref()],
113 cx,
114 )
115 .await;
116
117 project.update(cx, |project, cx| {
118 let tree = project.worktrees(cx).next().unwrap().read(cx);
119 assert_eq!(tree.file_count(), 5);
120 assert_eq!(
121 tree.inode_for_path("fennel/grape"),
122 tree.inode_for_path("finnochio/grape")
123 );
124 });
125}
126
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies .editorconfig handling: editorconfig values override
    // .zed/settings.json, a nested .editorconfig overrides its ancestor,
    // "tab_width" is used when "indent_size" is absent, and files not
    // matched by any editorconfig glob keep the .zed settings.
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp directory into a FakeFs for the project to use.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so the tab_size from .zed/settings.json applies.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
216
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies per-directory .zed settings and tasks: nested
    // .zed/settings.json overrides the root one, tasks from nested .zed
    // directories are discovered alongside root tasks, and task ordering
    // reflects recently-scheduled tasks and later-added global tasks.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the worktree's (default) context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // The source kind for tasks declared in the worktree-root .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolution: a/a.rs gets the root tab_size, b/b.rs gets
            // the nested override.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both the nested and the root .zed task are discovered.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as recently scheduled and add a new global task file.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                    settings::TaskKind::Script,
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first; the new global task
    // (with its env) appears last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
422
423#[gpui::test]
424async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
425 init_test(cx);
426 TaskStore::init(None);
427
428 let fs = FakeFs::new(cx.executor());
429 fs.insert_tree(
430 path!("/dir"),
431 json!({
432 ".zed": {
433 "tasks.json": r#"[{
434 "label": "test worktree root",
435 "command": "echo $ZED_WORKTREE_ROOT"
436 }]"#,
437 },
438 "a": {
439 "a.rs": "fn a() {\n A\n}"
440 },
441 }),
442 )
443 .await;
444
445 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
446 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
447
448 cx.executor().run_until_parked();
449 let worktree_id = cx.update(|cx| {
450 project.update(cx, |project, cx| {
451 project.worktrees(cx).next().unwrap().read(cx).id()
452 })
453 });
454
455 let active_non_worktree_item_tasks = cx.update(|cx| {
456 get_all_tasks(
457 &project,
458 &TaskContexts {
459 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
460 active_worktree_context: None,
461 other_worktree_contexts: Vec::new(),
462 },
463 cx,
464 )
465 });
466 assert!(
467 active_non_worktree_item_tasks.is_empty(),
468 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
469 );
470
471 let active_worktree_tasks = cx.update(|cx| {
472 get_all_tasks(
473 &project,
474 &TaskContexts {
475 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
476 active_worktree_context: Some((worktree_id, {
477 let mut worktree_context = TaskContext::default();
478 worktree_context
479 .task_variables
480 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
481 worktree_context
482 })),
483 other_worktree_contexts: Vec::new(),
484 },
485 cx,
486 )
487 });
488 assert_eq!(
489 active_worktree_tasks
490 .into_iter()
491 .map(|(source_kind, task)| {
492 let resolved = task.resolved.unwrap();
493 (source_kind, resolved.command)
494 })
495 .collect::<Vec<_>>(),
496 vec![(
497 TaskSourceKind::Worktree {
498 id: worktree_id,
499 directory_in_worktree: PathBuf::from(separator!(".zed")),
500 id_base: if cfg!(windows) {
501 "local worktree tasks from directory \".zed\"".into()
502 } else {
503 "local worktree tasks from directory \".zed\"".into()
504 },
505 },
506 "echo /dir".to_string(),
507 )]
508 );
509}
510
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end check of language server lifecycle management: servers start
    // when a matching buffer opens, buffers are configured from server
    // capabilities, edits/saves/renames are routed only to servers matching
    // the buffer's language, renames that change the extension migrate the
    // buffer between servers, restarts reopen documents in the new servers,
    // and dropping a buffer handle sends a close notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON servers with distinct completion triggers,
    // so we can tell which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so no triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // Only the rust buffer's edit reaches the rust server; the TOML edit
    // produces no DidChange notification at all.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // Same-extension rename: the rust server sees a close of the old path
    // followed by an open of the new one.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic on the buffer so we can check it is cleared when the
    // buffer later changes language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
912
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies workspace/didChangeWatchedFiles support: ignored directories
    // are only scanned once a server registers a watcher that reaches into
    // them, and subsequent FS mutations are forwarded to the server only when
    // they match its registered glob patterns.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for counting the directory scans triggered by the watcher
    // registration below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: an exact path, a glob within src/, and a
    // recursive glob inside the gitignored target/y directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Collect incoming change notifications, sorted by URI for stable asserts.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registration alone produces no change events, but it does trigger
    // scanning of the watched ignored subtree.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1112
1113#[gpui::test]
1114async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1115 init_test(cx);
1116
1117 let fs = FakeFs::new(cx.executor());
1118 fs.insert_tree(
1119 path!("/dir"),
1120 json!({
1121 "a.rs": "let a = 1;",
1122 "b.rs": "let b = 2;"
1123 }),
1124 )
1125 .await;
1126
1127 let project = Project::test(
1128 fs,
1129 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1130 cx,
1131 )
1132 .await;
1133 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1134
1135 let buffer_a = project
1136 .update(cx, |project, cx| {
1137 project.open_local_buffer(path!("/dir/a.rs"), cx)
1138 })
1139 .await
1140 .unwrap();
1141 let buffer_b = project
1142 .update(cx, |project, cx| {
1143 project.open_local_buffer(path!("/dir/b.rs"), cx)
1144 })
1145 .await
1146 .unwrap();
1147
1148 lsp_store.update(cx, |lsp_store, cx| {
1149 lsp_store
1150 .update_diagnostics(
1151 LanguageServerId(0),
1152 lsp::PublishDiagnosticsParams {
1153 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1154 version: None,
1155 diagnostics: vec![lsp::Diagnostic {
1156 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1157 severity: Some(lsp::DiagnosticSeverity::ERROR),
1158 message: "error 1".to_string(),
1159 ..Default::default()
1160 }],
1161 },
1162 &[],
1163 cx,
1164 )
1165 .unwrap();
1166 lsp_store
1167 .update_diagnostics(
1168 LanguageServerId(0),
1169 lsp::PublishDiagnosticsParams {
1170 uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
1171 version: None,
1172 diagnostics: vec![lsp::Diagnostic {
1173 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1174 severity: Some(DiagnosticSeverity::WARNING),
1175 message: "error 2".to_string(),
1176 ..Default::default()
1177 }],
1178 },
1179 &[],
1180 cx,
1181 )
1182 .unwrap();
1183 });
1184
1185 buffer_a.update(cx, |buffer, _| {
1186 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1187 assert_eq!(
1188 chunks
1189 .iter()
1190 .map(|(s, d)| (s.as_str(), *d))
1191 .collect::<Vec<_>>(),
1192 &[
1193 ("let ", None),
1194 ("a", Some(DiagnosticSeverity::ERROR)),
1195 (" = 1;", None),
1196 ]
1197 );
1198 });
1199 buffer_b.update(cx, |buffer, _| {
1200 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1201 assert_eq!(
1202 chunks
1203 .iter()
1204 .map(|(s, d)| (s.as_str(), *d))
1205 .collect::<Vec<_>>(),
1206 &[
1207 ("let ", None),
1208 ("b", Some(DiagnosticSeverity::WARNING)),
1209 (" = 2;", None),
1210 ]
1211 );
1212 });
1213}
1214
1215#[gpui::test]
1216async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1217 init_test(cx);
1218
1219 let fs = FakeFs::new(cx.executor());
1220 fs.insert_tree(
1221 path!("/root"),
1222 json!({
1223 "dir": {
1224 ".git": {
1225 "HEAD": "ref: refs/heads/main",
1226 },
1227 ".gitignore": "b.rs",
1228 "a.rs": "let a = 1;",
1229 "b.rs": "let b = 2;",
1230 },
1231 "other.rs": "let b = c;"
1232 }),
1233 )
1234 .await;
1235
1236 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1237 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1238 let (worktree, _) = project
1239 .update(cx, |project, cx| {
1240 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1241 })
1242 .await
1243 .unwrap();
1244 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1245
1246 let (worktree, _) = project
1247 .update(cx, |project, cx| {
1248 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1249 })
1250 .await
1251 .unwrap();
1252 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1253
1254 let server_id = LanguageServerId(0);
1255 lsp_store.update(cx, |lsp_store, cx| {
1256 lsp_store
1257 .update_diagnostics(
1258 server_id,
1259 lsp::PublishDiagnosticsParams {
1260 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1261 version: None,
1262 diagnostics: vec![lsp::Diagnostic {
1263 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1264 severity: Some(lsp::DiagnosticSeverity::ERROR),
1265 message: "unused variable 'b'".to_string(),
1266 ..Default::default()
1267 }],
1268 },
1269 &[],
1270 cx,
1271 )
1272 .unwrap();
1273 lsp_store
1274 .update_diagnostics(
1275 server_id,
1276 lsp::PublishDiagnosticsParams {
1277 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1278 version: None,
1279 diagnostics: vec![lsp::Diagnostic {
1280 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1281 severity: Some(lsp::DiagnosticSeverity::ERROR),
1282 message: "unknown variable 'c'".to_string(),
1283 ..Default::default()
1284 }],
1285 },
1286 &[],
1287 cx,
1288 )
1289 .unwrap();
1290 });
1291
1292 let main_ignored_buffer = project
1293 .update(cx, |project, cx| {
1294 project.open_buffer((main_worktree_id, "b.rs"), cx)
1295 })
1296 .await
1297 .unwrap();
1298 main_ignored_buffer.update(cx, |buffer, _| {
1299 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1300 assert_eq!(
1301 chunks
1302 .iter()
1303 .map(|(s, d)| (s.as_str(), *d))
1304 .collect::<Vec<_>>(),
1305 &[
1306 ("let ", None),
1307 ("b", Some(DiagnosticSeverity::ERROR)),
1308 (" = 2;", None),
1309 ],
1310 "Gigitnored buffers should still get in-buffer diagnostics",
1311 );
1312 });
1313 let other_buffer = project
1314 .update(cx, |project, cx| {
1315 project.open_buffer((other_worktree_id, ""), cx)
1316 })
1317 .await
1318 .unwrap();
1319 other_buffer.update(cx, |buffer, _| {
1320 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1321 assert_eq!(
1322 chunks
1323 .iter()
1324 .map(|(s, d)| (s.as_str(), *d))
1325 .collect::<Vec<_>>(),
1326 &[
1327 ("let b = ", None),
1328 ("c", Some(DiagnosticSeverity::ERROR)),
1329 (";", None),
1330 ],
1331 "Buffers from hidden projects should still get in-buffer diagnostics"
1332 );
1333 });
1334
1335 project.update(cx, |project, cx| {
1336 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1337 assert_eq!(
1338 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1339 vec![(
1340 ProjectPath {
1341 worktree_id: main_worktree_id,
1342 path: Arc::from(Path::new("b.rs")),
1343 },
1344 server_id,
1345 DiagnosticSummary {
1346 error_count: 1,
1347 warning_count: 0,
1348 }
1349 )]
1350 );
1351 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1352 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1353 });
1354}
1355
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies the project's event sequence around a disk-based diagnostics
    // pass driven by LSP work-done progress: Started -> DiagnosticsUpdated ->
    // Finished, and that re-publishing identical empty diagnostics does not
    // emit a second update event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The fake server's progress notifications with this token are treated as
    // disk-based diagnostics progress.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress with the configured token marks disk-based
    // diagnostics as started.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for a.rs while progress is running emits a
    // DiagnosticsUpdated event for that path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress marks the disk-based pass as finished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Second identical empty publish: no further event is emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1491
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restarting a language server while its disk-based diagnostics progress
    // is still open must not leave the project stuck in a "diagnosing" state:
    // only the new server instance's progress is tracked afterwards.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The replacement server gets a fresh id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1578
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    // Diagnostics published by a language server must be cleared (from both
    // the buffer and the project summary) when that server is restarted.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The error is reflected in the buffer and the project-wide summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1659
1660#[gpui::test]
1661async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1662 init_test(cx);
1663
1664 let fs = FakeFs::new(cx.executor());
1665 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1666
1667 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1668 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1669
1670 language_registry.add(rust_lang());
1671 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1672
1673 let (buffer, _handle) = project
1674 .update(cx, |project, cx| {
1675 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1676 })
1677 .await
1678 .unwrap();
1679
1680 // Before restarting the server, report diagnostics with an unknown buffer version.
1681 let fake_server = fake_servers.next().await.unwrap();
1682 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1683 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1684 version: Some(10000),
1685 diagnostics: Vec::new(),
1686 });
1687 cx.executor().run_until_parked();
1688 project.update(cx, |project, cx| {
1689 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1690 });
1691
1692 let mut fake_server = fake_servers.next().await.unwrap();
1693 let notification = fake_server
1694 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1695 .await
1696 .text_document;
1697 assert_eq!(notification.version, 0);
1698}
1699
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Cancelling language-server work for a buffer should send a
    // WorkDoneProgressCancel only for progress that was begun as cancellable.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // One non-cancellable progress...
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // ...and one cancellable progress.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable progress token is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1764
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Toggling the `enable_language_server` setting per language should stop
    // and restart exactly the affected servers, leaving others untouched.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance re-opens the buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JavaScript server exits.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1882
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Diagnostics published against an older buffer version must be mapped
    // through subsequent edits: positions shift with insertions, overlapping
    // diagnostics are highlighted correctly, and disk-based diagnostics are
    // translated by the changes made since the version they were reported for.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Ranges reported on lines 1 and 2 now appear on lines 3 and 4,
        // reflecting the two inserted newlines.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // The error takes precedence where the two ranges overlap; the
        // warning covers the remainder of its wider range.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2168
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    // Zero-width diagnostic ranges must be widened to cover a visible
    // character so they can be rendered in the buffer.
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Insert two empty-range diagnostics: one mid-line (before `;`) and one
    // at the very end of a line.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2241
// Verifies that diagnostics reported by two distinct language servers
// (different `LanguageServerId`s) for the same path are tracked separately
// and both contribute to the project-wide diagnostic summary.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Server 0 reports one error covering "one".
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // Server 1 reports its own error on the exact same range; it must not
        // replace server 0's diagnostic.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors are counted in the summary.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2298
// Verifies that LSP edits computed against an older document version are
// correctly transformed onto the current buffer contents: the buffer is
// edited locally after the server snapshots its version, and the server's
// edits (anchored to that past version) must still land in the right places.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the edits
    // below will be tagged with this (now-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Positions in these edits refer to the *original* (pre-edit) text; the
    // version argument tells `edits_from_lsp` to translate them forward.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves both the server's changes and
    // the local edits made after the server's snapshot.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2453
// Verifies that `edits_from_lsp` minimizes a large "rewrite most of the file"
// style diff (as rust-analyzer produces for merge-imports) down to the small
// set of edits that actually change text.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large delete-and-reinsert diff collapsed to two minimal edits:
        // rewriting the import path and deleting the now-redundant line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2564
// Verifies tolerance of a spec-violating edit sequence: a zero-width
// insertion listed *after* a replacement at the same start position. The
// insertion must still be applied before the replaced text.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The import line is inserted before the (identity) replacement of "Path".
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
2620
// Verifies that `edits_from_lsp` sanitizes malformed server edits: unordered
// edits, inverted ranges (start after end), and ranges extending past the end
// of the file are normalized into a valid, minimal edit set.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start column 8 comes after end column 4.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range ends at line 99, far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal pair of
        // edits as in the well-formed adjacent-lines test.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2727
2728fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2729 buffer: &Buffer,
2730 range: Range<T>,
2731) -> Vec<(String, Option<DiagnosticSeverity>)> {
2732 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2733 for chunk in buffer.snapshot().chunks(range, true) {
2734 if chunks.last().map_or(false, |prev_chunk| {
2735 prev_chunk.1 == chunk.diagnostic_severity
2736 }) {
2737 chunks.last_mut().unwrap().0.push_str(chunk.text);
2738 } else {
2739 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2740 }
2741 }
2742 chunks
2743}
2744
// Verifies go-to-definition into a file outside the project's worktrees:
// the target file is loaded into an invisible worktree that is released once
// the last reference to the definition is dropped, and no additional
// language server is spawned for it.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside its worktrees.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, a.rs lives in an extra, non-visible
        // worktree alongside the visible b.rs worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: snapshot of (abs_path, is_visible) for all project worktrees.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2842
// Verifies completion-item resolution precedence: when a completion item
// carries an explicit `text_edit`, its range and new text win over both
// `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's item replaces the final "fqn" (last 3 chars) with
    // "textEditText" via an explicit text edit.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
2920
// Verifies completion handling when the server supplies a default
// `edit_range` in `CompletionListItemDefaults` instead of per-item text
// edits: the new text falls back to `insert_text`, then to `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text is used, applied over the default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].old_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With neither edit nor insert_text, the label itself is inserted.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].old_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3046
// Verifies completion range inference when the server provides neither a
// per-item text edit nor a default edit range: the replaced span is derived
// from the word (or quoted path segment) around the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The replaced range is the word before the cursor ("fqn", 3 chars).
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // Inside the string literal, "cmp" (3 chars before the closing quote) is
    // replaced; the cursor sits just before the trailing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3142
// Verifies that carriage returns in a completion's insert_text ("\r" and
// "\r\n") are normalized to plain "\n" before insertion.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // insert_text mixes a bare "\r" and a Windows-style "\r\n".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    assert_eq!(completions.len(), 1);
    // Both line-ending styles collapse to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3205
// Verifies the full command-backed code-action round trip: the server
// returns an action with no edits, resolution attaches a command, executing
// the command makes the server send back a `workspace/applyEdit` request,
// and the resulting edits end up in the returned project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated applyEdit inserting "X" at the start
                    // of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3346
// Verifies that saving a buffer writes its in-memory contents to disk,
// using a large (>10k lines) edit to exercise chunked writes. Line endings
// are normalized before comparing, since the platform may save "\r\n".
#[gpui::test(iterations = 10)]
async fn test_save_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "the old contents");
        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
    });

    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();

    let new_text = fs
        .load(Path::new(path!("/dir/file1")))
        .await
        .unwrap()
        .replace("\r\n", "\n");
    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
}
3382
3383#[gpui::test(iterations = 30)]
3384async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3385 init_test(cx);
3386
3387 let fs = FakeFs::new(cx.executor().clone());
3388 fs.insert_tree(
3389 path!("/dir"),
3390 json!({
3391 "file1": "the original contents",
3392 }),
3393 )
3394 .await;
3395
3396 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3397 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3398 let buffer = project
3399 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3400 .await
3401 .unwrap();
3402
3403 // Simulate buffer diffs being slow, so that they don't complete before
3404 // the next file change occurs.
3405 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3406
3407 // Change the buffer's file on disk, and then wait for the file change
3408 // to be detected by the worktree, so that the buffer starts reloading.
3409 fs.save(
3410 path!("/dir/file1").as_ref(),
3411 &"the first contents".into(),
3412 Default::default(),
3413 )
3414 .await
3415 .unwrap();
3416 worktree.next_event(cx).await;
3417
3418 // Change the buffer's file again. Depending on the random seed, the
3419 // previous file change may still be in progress.
3420 fs.save(
3421 path!("/dir/file1").as_ref(),
3422 &"the second contents".into(),
3423 Default::default(),
3424 )
3425 .await
3426 .unwrap();
3427 worktree.next_event(cx).await;
3428
3429 cx.executor().run_until_parked();
3430 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3431 buffer.read_with(cx, |buffer, _| {
3432 assert_eq!(buffer.text(), on_disk_text);
3433 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3434 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3435 });
3436}
3437
3438#[gpui::test(iterations = 30)]
3439async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3440 init_test(cx);
3441
3442 let fs = FakeFs::new(cx.executor().clone());
3443 fs.insert_tree(
3444 path!("/dir"),
3445 json!({
3446 "file1": "the original contents",
3447 }),
3448 )
3449 .await;
3450
3451 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3452 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3453 let buffer = project
3454 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3455 .await
3456 .unwrap();
3457
3458 // Simulate buffer diffs being slow, so that they don't complete before
3459 // the next file change occurs.
3460 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3461
3462 // Change the buffer's file on disk, and then wait for the file change
3463 // to be detected by the worktree, so that the buffer starts reloading.
3464 fs.save(
3465 path!("/dir/file1").as_ref(),
3466 &"the first contents".into(),
3467 Default::default(),
3468 )
3469 .await
3470 .unwrap();
3471 worktree.next_event(cx).await;
3472
3473 cx.executor()
3474 .spawn(cx.executor().simulate_random_delay())
3475 .await;
3476
3477 // Perform a noop edit, causing the buffer's version to increase.
3478 buffer.update(cx, |buffer, cx| {
3479 buffer.edit([(0..0, " ")], None, cx);
3480 buffer.undo(cx);
3481 });
3482
3483 cx.executor().run_until_parked();
3484 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3485 buffer.read_with(cx, |buffer, _| {
3486 let buffer_text = buffer.text();
3487 if buffer_text == on_disk_text {
3488 assert!(
3489 !buffer.is_dirty() && !buffer.has_conflict(),
3490 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3491 );
3492 }
3493 // If the file change occurred while the buffer was processing the first
3494 // change, the buffer will be in a conflicting state.
3495 else {
3496 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3497 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3498 }
3499 });
3500}
3501
3502#[gpui::test]
3503async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3504 init_test(cx);
3505
3506 let fs = FakeFs::new(cx.executor());
3507 fs.insert_tree(
3508 path!("/dir"),
3509 json!({
3510 "file1": "the old contents",
3511 }),
3512 )
3513 .await;
3514
3515 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3516 let buffer = project
3517 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3518 .await
3519 .unwrap();
3520 buffer.update(cx, |buffer, cx| {
3521 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3522 });
3523
3524 project
3525 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3526 .await
3527 .unwrap();
3528
3529 let new_text = fs
3530 .load(Path::new(path!("/dir/file1")))
3531 .await
3532 .unwrap()
3533 .replace("\r\n", "\n");
3534 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3535}
3536
3537#[gpui::test]
3538async fn test_save_as(cx: &mut gpui::TestAppContext) {
3539 init_test(cx);
3540
3541 let fs = FakeFs::new(cx.executor());
3542 fs.insert_tree("/dir", json!({})).await;
3543
3544 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3545
3546 let languages = project.update(cx, |project, _| project.languages().clone());
3547 languages.add(rust_lang());
3548
3549 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3550 buffer.update(cx, |buffer, cx| {
3551 buffer.edit([(0..0, "abc")], None, cx);
3552 assert!(buffer.is_dirty());
3553 assert!(!buffer.has_conflict());
3554 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3555 });
3556 project
3557 .update(cx, |project, cx| {
3558 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3559 let path = ProjectPath {
3560 worktree_id,
3561 path: Arc::from(Path::new("file1.rs")),
3562 };
3563 project.save_buffer_as(buffer.clone(), path, cx)
3564 })
3565 .await
3566 .unwrap();
3567 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3568
3569 cx.executor().run_until_parked();
3570 buffer.update(cx, |buffer, cx| {
3571 assert_eq!(
3572 buffer.file().unwrap().full_path(cx),
3573 Path::new("dir/file1.rs")
3574 );
3575 assert!(!buffer.is_dirty());
3576 assert!(!buffer.has_conflict());
3577 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3578 });
3579
3580 let opened_buffer = project
3581 .update(cx, |project, cx| {
3582 project.open_local_buffer("/dir/file1.rs", cx)
3583 })
3584 .await
3585 .unwrap();
3586 assert_eq!(opened_buffer, buffer);
3587}
3588
// Verifies that file-system rescans keep entry ids and open buffers stable
// across renames/deletions, and that a replicated ("remote") worktree
// converges to the same state when fed the observed update stream.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    // Uses a real temp directory (not FakeFs) so genuine FS events drive the scan.
    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Resolves a worktree-relative path to its stable entry id.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits so it can be replayed
    // into the remote replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree's paths should reflect all of the renames/deletions.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including a rename of a parent directory.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but reports DiskState::Deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    // Replay the captured update stream into the replica.
    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3754
3755#[gpui::test(iterations = 10)]
3756async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3757 init_test(cx);
3758
3759 let fs = FakeFs::new(cx.executor());
3760 fs.insert_tree(
3761 path!("/dir"),
3762 json!({
3763 "a": {
3764 "file1": "",
3765 }
3766 }),
3767 )
3768 .await;
3769
3770 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3771 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3772 let tree_id = tree.update(cx, |tree, _| tree.id());
3773
3774 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3775 project.update(cx, |project, cx| {
3776 let tree = project.worktrees(cx).next().unwrap();
3777 tree.read(cx)
3778 .entry_for_path(path)
3779 .unwrap_or_else(|| panic!("no entry for path {}", path))
3780 .id
3781 })
3782 };
3783
3784 let dir_id = id_for_path("a", cx);
3785 let file_id = id_for_path("a/file1", cx);
3786 let buffer = project
3787 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3788 .await
3789 .unwrap();
3790 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3791
3792 project
3793 .update(cx, |project, cx| {
3794 project.rename_entry(dir_id, Path::new("b"), cx)
3795 })
3796 .unwrap()
3797 .await
3798 .to_included()
3799 .unwrap();
3800 cx.executor().run_until_parked();
3801
3802 assert_eq!(id_for_path("b", cx), dir_id);
3803 assert_eq!(id_for_path("b/file1", cx), file_id);
3804 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3805}
3806
3807#[gpui::test]
3808async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3809 init_test(cx);
3810
3811 let fs = FakeFs::new(cx.executor());
3812 fs.insert_tree(
3813 "/dir",
3814 json!({
3815 "a.txt": "a-contents",
3816 "b.txt": "b-contents",
3817 }),
3818 )
3819 .await;
3820
3821 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3822
3823 // Spawn multiple tasks to open paths, repeating some paths.
3824 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3825 (
3826 p.open_local_buffer("/dir/a.txt", cx),
3827 p.open_local_buffer("/dir/b.txt", cx),
3828 p.open_local_buffer("/dir/a.txt", cx),
3829 )
3830 });
3831
3832 let buffer_a_1 = buffer_a_1.await.unwrap();
3833 let buffer_a_2 = buffer_a_2.await.unwrap();
3834 let buffer_b = buffer_b.await.unwrap();
3835 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3836 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3837
3838 // There is only one buffer per path.
3839 let buffer_a_id = buffer_a_1.entity_id();
3840 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3841
3842 // Open the same path again while it is still open.
3843 drop(buffer_a_1);
3844 let buffer_a_3 = project
3845 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3846 .await
3847 .unwrap();
3848
3849 // There's still only one buffer per path.
3850 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3851}
3852
3853#[gpui::test]
3854async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3855 init_test(cx);
3856
3857 let fs = FakeFs::new(cx.executor());
3858 fs.insert_tree(
3859 path!("/dir"),
3860 json!({
3861 "file1": "abc",
3862 "file2": "def",
3863 "file3": "ghi",
3864 }),
3865 )
3866 .await;
3867
3868 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3869
3870 let buffer1 = project
3871 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3872 .await
3873 .unwrap();
3874 let events = Arc::new(Mutex::new(Vec::new()));
3875
3876 // initially, the buffer isn't dirty.
3877 buffer1.update(cx, |buffer, cx| {
3878 cx.subscribe(&buffer1, {
3879 let events = events.clone();
3880 move |_, _, event, _| match event {
3881 BufferEvent::Operation { .. } => {}
3882 _ => events.lock().push(event.clone()),
3883 }
3884 })
3885 .detach();
3886
3887 assert!(!buffer.is_dirty());
3888 assert!(events.lock().is_empty());
3889
3890 buffer.edit([(1..2, "")], None, cx);
3891 });
3892
3893 // after the first edit, the buffer is dirty, and emits a dirtied event.
3894 buffer1.update(cx, |buffer, cx| {
3895 assert!(buffer.text() == "ac");
3896 assert!(buffer.is_dirty());
3897 assert_eq!(
3898 *events.lock(),
3899 &[
3900 language::BufferEvent::Edited,
3901 language::BufferEvent::DirtyChanged
3902 ]
3903 );
3904 events.lock().clear();
3905 buffer.did_save(
3906 buffer.version(),
3907 buffer.file().unwrap().disk_state().mtime(),
3908 cx,
3909 );
3910 });
3911
3912 // after saving, the buffer is not dirty, and emits a saved event.
3913 buffer1.update(cx, |buffer, cx| {
3914 assert!(!buffer.is_dirty());
3915 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
3916 events.lock().clear();
3917
3918 buffer.edit([(1..1, "B")], None, cx);
3919 buffer.edit([(2..2, "D")], None, cx);
3920 });
3921
3922 // after editing again, the buffer is dirty, and emits another dirty event.
3923 buffer1.update(cx, |buffer, cx| {
3924 assert!(buffer.text() == "aBDc");
3925 assert!(buffer.is_dirty());
3926 assert_eq!(
3927 *events.lock(),
3928 &[
3929 language::BufferEvent::Edited,
3930 language::BufferEvent::DirtyChanged,
3931 language::BufferEvent::Edited,
3932 ],
3933 );
3934 events.lock().clear();
3935
3936 // After restoring the buffer to its previously-saved state,
3937 // the buffer is not considered dirty anymore.
3938 buffer.edit([(1..3, "")], None, cx);
3939 assert!(buffer.text() == "ac");
3940 assert!(!buffer.is_dirty());
3941 });
3942
3943 assert_eq!(
3944 *events.lock(),
3945 &[
3946 language::BufferEvent::Edited,
3947 language::BufferEvent::DirtyChanged
3948 ]
3949 );
3950
3951 // When a file is deleted, it is not considered dirty.
3952 let events = Arc::new(Mutex::new(Vec::new()));
3953 let buffer2 = project
3954 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
3955 .await
3956 .unwrap();
3957 buffer2.update(cx, |_, cx| {
3958 cx.subscribe(&buffer2, {
3959 let events = events.clone();
3960 move |_, _, event, _| match event {
3961 BufferEvent::Operation { .. } => {}
3962 _ => events.lock().push(event.clone()),
3963 }
3964 })
3965 .detach();
3966 });
3967
3968 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
3969 .await
3970 .unwrap();
3971 cx.executor().run_until_parked();
3972 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3973 assert_eq!(
3974 mem::take(&mut *events.lock()),
3975 &[language::BufferEvent::FileHandleChanged]
3976 );
3977
3978 // Buffer becomes dirty when edited.
3979 buffer2.update(cx, |buffer, cx| {
3980 buffer.edit([(2..3, "")], None, cx);
3981 assert_eq!(buffer.is_dirty(), true);
3982 });
3983 assert_eq!(
3984 mem::take(&mut *events.lock()),
3985 &[
3986 language::BufferEvent::Edited,
3987 language::BufferEvent::DirtyChanged
3988 ]
3989 );
3990
3991 // Buffer becomes clean again when all of its content is removed, because
3992 // the file was deleted.
3993 buffer2.update(cx, |buffer, cx| {
3994 buffer.edit([(0..2, "")], None, cx);
3995 assert_eq!(buffer.is_empty(), true);
3996 assert_eq!(buffer.is_dirty(), false);
3997 });
3998 assert_eq!(
3999 *events.lock(),
4000 &[
4001 language::BufferEvent::Edited,
4002 language::BufferEvent::DirtyChanged
4003 ]
4004 );
4005
4006 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4007 let events = Arc::new(Mutex::new(Vec::new()));
4008 let buffer3 = project
4009 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4010 .await
4011 .unwrap();
4012 buffer3.update(cx, |_, cx| {
4013 cx.subscribe(&buffer3, {
4014 let events = events.clone();
4015 move |_, _, event, _| match event {
4016 BufferEvent::Operation { .. } => {}
4017 _ => events.lock().push(event.clone()),
4018 }
4019 })
4020 .detach();
4021 });
4022
4023 buffer3.update(cx, |buffer, cx| {
4024 buffer.edit([(0..0, "x")], None, cx);
4025 });
4026 events.lock().clear();
4027 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4028 .await
4029 .unwrap();
4030 cx.executor().run_until_parked();
4031 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4032 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4033}
4034
// Verifies that an unmodified buffer reloads from disk (keeping anchors
// aligned via diffing), while a modified buffer keeps its edits and is
// flagged as conflicted instead of being reloaded.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The `ˇ` markers denote offsets at which anchors are created below.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create one anchor per marked offset; they should track positions
    // across the disk-driven reload below.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // The new markers give the offsets where each anchor is expected to land
    // after the reload.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4117
4118#[gpui::test]
4119async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4120 init_test(cx);
4121
4122 let fs = FakeFs::new(cx.executor());
4123 fs.insert_tree(
4124 path!("/dir"),
4125 json!({
4126 "file1": "a\nb\nc\n",
4127 "file2": "one\r\ntwo\r\nthree\r\n",
4128 }),
4129 )
4130 .await;
4131
4132 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4133 let buffer1 = project
4134 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4135 .await
4136 .unwrap();
4137 let buffer2 = project
4138 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4139 .await
4140 .unwrap();
4141
4142 buffer1.update(cx, |buffer, _| {
4143 assert_eq!(buffer.text(), "a\nb\nc\n");
4144 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4145 });
4146 buffer2.update(cx, |buffer, _| {
4147 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4148 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4149 });
4150
4151 // Change a file's line endings on disk from unix to windows. The buffer's
4152 // state updates correctly.
4153 fs.save(
4154 path!("/dir/file1").as_ref(),
4155 &"aaa\nb\nc\n".into(),
4156 LineEnding::Windows,
4157 )
4158 .await
4159 .unwrap();
4160 cx.executor().run_until_parked();
4161 buffer1.update(cx, |buffer, _| {
4162 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4163 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4164 });
4165
4166 // Save a file with windows line endings. The file is written correctly.
4167 buffer2.update(cx, |buffer, cx| {
4168 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4169 });
4170 project
4171 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4172 .await
4173 .unwrap();
4174 assert_eq!(
4175 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4176 "one\r\ntwo\r\nthree\r\nfour\r\n",
4177 );
4178}
4179
// Verifies that LSP diagnostics linked via `related_information` are grouped:
// each primary diagnostic and its supplemental hints share a group id, and
// `diagnostic_group` returns exactly the members of one group.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two logical groups: "error 1" (warning + one hint) and "error 2"
    // (error + two hints). Hints point back at their primary via
    // related_information, and vice versa.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary of group "error 1".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Supplemental hint belonging to "error 1".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary of group "error 2", linking to its two hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First supplemental hint belonging to "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second supplemental hint belonging to "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in buffer order, carrying their assigned group ids.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 is "error 2" plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 is "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4422
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Renaming a worktree entry should drive the LSP file-operation protocol:
    // first a `workspace/willRenameFiles` request (whose returned workspace
    // edit is resolved), then a `workspace/didRenameFiles` notification once
    // the rename has happened.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters the fake server registers: every `.rs` file and
    // every folder. The rename below (`one.rs` -> `three.rs`) matches the first.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer so the fake language server gets started for this worktree.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; it cannot complete until the server has answered
    // the `willRenameFiles` request registered below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // Workspace edit the fake server returns from `willRenameFiles`.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the server actually handed back, so we can assert it
    // was resolved exactly once at the end of the test.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe exactly the one pending rename.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive `didRenameFiles`
    // with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4551
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end symbol rename: `prepare_rename` validates the range under the
    // cursor, then `perform_rename` applies the server's multi-file workspace
    // edit to the affected buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    // Advertise `prepareRename` support so the project sends it.
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside `ONE`); the fake server reports the
    // renameable range 6..9 (`ONE` itself).
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename to `THREE`; the server answers with edits spanning
    // both `one.rs` (the definition) and `two.rs` (two usages).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction holds one entry per edited buffer; verify the
    // post-edit text of each.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4691
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Project-wide text search: results are keyed by worktree-relative path
    // and carry byte-offset ranges of the matches. The second half verifies
    // that unsaved, in-memory buffer edits are reflected in search results.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit an open buffer without saving; the next search must see the
    // in-memory contents rather than what is on disk.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40]),
            // `four.rs` now matches only because of the unsaved edits above.
            (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
4766
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Exercises the inclusion `PathMatcher` argument of `SearchQuery::text`:
    // only files matching at least one inclusion glob are searched, and
    // inclusion globs that match nothing are simply ignored.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion glob that matches no file at all.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Single inclusion glob restricting the search to Rust files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A matching glob combined with a non-matching one.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching globs include the union of their matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.ts").to_string(), vec![14..18]),
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
4882
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Exercises the exclusion `PathMatcher` argument of `SearchQuery::text`:
    // files matching any exclusion glob are skipped, and exclusion globs that
    // match nothing leave the results untouched.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Exclusion glob that matches no file at all.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Excluding Rust files leaves only the TypeScript matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A matching exclusion combined with a non-matching one.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Exclusions covering every file yield no results at all.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
4998
4999#[gpui::test]
5000async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5001 init_test(cx);
5002
5003 let search_query = "file";
5004
5005 let fs = FakeFs::new(cx.executor());
5006 fs.insert_tree(
5007 path!("/dir"),
5008 json!({
5009 "one.rs": r#"// Rust file one"#,
5010 "one.ts": r#"// TypeScript file one"#,
5011 "two.rs": r#"// Rust file two"#,
5012 "two.ts": r#"// TypeScript file two"#,
5013 }),
5014 )
5015 .await;
5016 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5017
5018 assert!(
5019 search(
5020 &project,
5021 SearchQuery::text(
5022 search_query,
5023 false,
5024 true,
5025 false,
5026 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5027 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5028 None,
5029 )
5030 .unwrap(),
5031 cx
5032 )
5033 .await
5034 .unwrap()
5035 .is_empty(),
5036 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5037 );
5038
5039 assert!(
5040 search(
5041 &project,
5042 SearchQuery::text(
5043 search_query,
5044 false,
5045 true,
5046 false,
5047 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5048 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5049 None,
5050 )
5051 .unwrap(),
5052 cx
5053 )
5054 .await
5055 .unwrap()
5056 .is_empty(),
5057 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5058 );
5059
5060 assert!(
5061 search(
5062 &project,
5063 SearchQuery::text(
5064 search_query,
5065 false,
5066 true,
5067 false,
5068 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5069 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5070 None,
5071 )
5072 .unwrap(),
5073 cx
5074 )
5075 .await
5076 .unwrap()
5077 .is_empty(),
5078 "Non-matching inclusions and exclusions should not change that."
5079 );
5080
5081 assert_eq!(
5082 search(
5083 &project,
5084 SearchQuery::text(
5085 search_query,
5086 false,
5087 true,
5088 false,
5089 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5090 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5091 None,
5092 )
5093 .unwrap(),
5094 cx
5095 )
5096 .await
5097 .unwrap(),
5098 HashMap::from_iter([
5099 (separator!("dir/one.ts").to_string(), vec![14..18]),
5100 (separator!("dir/two.ts").to_string(), vec![14..18]),
5101 ]),
5102 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5103 );
5104}
5105
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // With several worktrees open, inclusion globs can be scoped to a single
    // worktree by prefixing the worktree root's name; unprefixed globs apply
    // across all worktrees.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // Glob anchored to worktree-a only.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Glob anchored to worktree-b only.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // Unanchored glob matches files in both worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
5200
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Gitignored entries (`target/`, `node_modules/`) are skipped by default;
    // setting the fourth `SearchQuery::text` flag searches them too, and
    // inclusion/exclusion matchers still apply within ignored directories.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default query: ignored directories are not searched.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Same query, but with the fourth flag set so ignored entries are searched.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Ignored search combined with inclusion/exclusion matchers: only the
    // included prettier directory is searched, minus the excluded `.ts` files.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5320
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // `Project::create_entry` must allow names containing dots, but reject any
    // path that escapes the worktree or contains a `..` component; the same
    // restriction applies when opening buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a legal file name: it contains dots but no `..` path component.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only the valid creation ("b..") should have reached the file system.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
5390
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // With several language servers attached to one buffer, `Project::hover`
    // should query every server that advertises hover support, skip those that
    // don't, and merge the non-empty responses.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Three servers with hover support (one of which will answer `None`), and
    // one without hover capabilities that must never be queried.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wait for each server to start and install its hover handler (keyed by
    // server name so we can later await every handler that should fire).
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two answer with a real hover containing their own name.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // This one is queried but returns no hover.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // No hover capability: receiving a hover request is a test failure.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Every hover-capable server must have received exactly one request.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5544
5545#[gpui::test]
5546async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5547 init_test(cx);
5548
5549 let fs = FakeFs::new(cx.executor());
5550 fs.insert_tree(
5551 path!("/dir"),
5552 json!({
5553 "a.ts": "a",
5554 }),
5555 )
5556 .await;
5557
5558 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5559
5560 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5561 language_registry.add(typescript_lang());
5562 let mut fake_language_servers = language_registry.register_fake_lsp(
5563 "TypeScript",
5564 FakeLspAdapter {
5565 capabilities: lsp::ServerCapabilities {
5566 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5567 ..lsp::ServerCapabilities::default()
5568 },
5569 ..FakeLspAdapter::default()
5570 },
5571 );
5572
5573 let (buffer, _handle) = project
5574 .update(cx, |p, cx| {
5575 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5576 })
5577 .await
5578 .unwrap();
5579 cx.executor().run_until_parked();
5580
5581 let fake_server = fake_language_servers
5582 .next()
5583 .await
5584 .expect("failed to get the language server");
5585
5586 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5587 move |_, _| async move {
5588 Ok(Some(lsp::Hover {
5589 contents: lsp::HoverContents::Array(vec![
5590 lsp::MarkedString::String("".to_string()),
5591 lsp::MarkedString::String(" ".to_string()),
5592 lsp::MarkedString::String("\n\n\n".to_string()),
5593 ]),
5594 range: None,
5595 }))
5596 },
5597 );
5598
5599 let hover_task = project.update(cx, |project, cx| {
5600 project.hover(&buffer, Point::new(0, 0), cx)
5601 });
5602 let () = request_handled
5603 .next()
5604 .await
5605 .expect("All hover requests should have been triggered");
5606 assert_eq!(
5607 Vec::<String>::new(),
5608 hover_task
5609 .await
5610 .into_iter()
5611 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5612 .sorted()
5613 .collect::<Vec<_>>(),
5614 "Empty hover parts should be ignored"
5615 );
5616}
5617
5618#[gpui::test]
5619async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5620 init_test(cx);
5621
5622 let fs = FakeFs::new(cx.executor());
5623 fs.insert_tree(
5624 path!("/dir"),
5625 json!({
5626 "a.ts": "a",
5627 }),
5628 )
5629 .await;
5630
5631 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5632
5633 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5634 language_registry.add(typescript_lang());
5635 let mut fake_language_servers = language_registry.register_fake_lsp(
5636 "TypeScript",
5637 FakeLspAdapter {
5638 capabilities: lsp::ServerCapabilities {
5639 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5640 ..lsp::ServerCapabilities::default()
5641 },
5642 ..FakeLspAdapter::default()
5643 },
5644 );
5645
5646 let (buffer, _handle) = project
5647 .update(cx, |p, cx| {
5648 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5649 })
5650 .await
5651 .unwrap();
5652 cx.executor().run_until_parked();
5653
5654 let fake_server = fake_language_servers
5655 .next()
5656 .await
5657 .expect("failed to get the language server");
5658
5659 let mut request_handled = fake_server
5660 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
5661 Ok(Some(vec![
5662 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5663 title: "organize imports".to_string(),
5664 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5665 ..lsp::CodeAction::default()
5666 }),
5667 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5668 title: "fix code".to_string(),
5669 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5670 ..lsp::CodeAction::default()
5671 }),
5672 ]))
5673 });
5674
5675 let code_actions_task = project.update(cx, |project, cx| {
5676 project.code_actions(
5677 &buffer,
5678 0..buffer.read(cx).len(),
5679 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5680 cx,
5681 )
5682 });
5683
5684 let () = request_handled
5685 .next()
5686 .await
5687 .expect("The code action request should have been triggered");
5688
5689 let code_actions = code_actions_task.await.unwrap();
5690 assert_eq!(code_actions.len(), 1);
5691 assert_eq!(
5692 code_actions[0].lsp_action.action_kind(),
5693 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5694 );
5695}
5696
5697#[gpui::test]
5698async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5699 init_test(cx);
5700
5701 let fs = FakeFs::new(cx.executor());
5702 fs.insert_tree(
5703 path!("/dir"),
5704 json!({
5705 "a.tsx": "a",
5706 }),
5707 )
5708 .await;
5709
5710 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5711
5712 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5713 language_registry.add(tsx_lang());
5714 let language_server_names = [
5715 "TypeScriptServer",
5716 "TailwindServer",
5717 "ESLintServer",
5718 "NoActionsCapabilitiesServer",
5719 ];
5720
5721 let mut language_server_rxs = [
5722 language_registry.register_fake_lsp(
5723 "tsx",
5724 FakeLspAdapter {
5725 name: language_server_names[0],
5726 capabilities: lsp::ServerCapabilities {
5727 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5728 ..lsp::ServerCapabilities::default()
5729 },
5730 ..FakeLspAdapter::default()
5731 },
5732 ),
5733 language_registry.register_fake_lsp(
5734 "tsx",
5735 FakeLspAdapter {
5736 name: language_server_names[1],
5737 capabilities: lsp::ServerCapabilities {
5738 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5739 ..lsp::ServerCapabilities::default()
5740 },
5741 ..FakeLspAdapter::default()
5742 },
5743 ),
5744 language_registry.register_fake_lsp(
5745 "tsx",
5746 FakeLspAdapter {
5747 name: language_server_names[2],
5748 capabilities: lsp::ServerCapabilities {
5749 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5750 ..lsp::ServerCapabilities::default()
5751 },
5752 ..FakeLspAdapter::default()
5753 },
5754 ),
5755 language_registry.register_fake_lsp(
5756 "tsx",
5757 FakeLspAdapter {
5758 name: language_server_names[3],
5759 capabilities: lsp::ServerCapabilities {
5760 code_action_provider: None,
5761 ..lsp::ServerCapabilities::default()
5762 },
5763 ..FakeLspAdapter::default()
5764 },
5765 ),
5766 ];
5767
5768 let (buffer, _handle) = project
5769 .update(cx, |p, cx| {
5770 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5771 })
5772 .await
5773 .unwrap();
5774 cx.executor().run_until_parked();
5775
5776 let mut servers_with_actions_requests = HashMap::default();
5777 for i in 0..language_server_names.len() {
5778 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5779 panic!(
5780 "Failed to get language server #{i} with name {}",
5781 &language_server_names[i]
5782 )
5783 });
5784 let new_server_name = new_server.server.name();
5785
5786 assert!(
5787 !servers_with_actions_requests.contains_key(&new_server_name),
5788 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5789 );
5790 match new_server_name.0.as_ref() {
5791 "TailwindServer" | "TypeScriptServer" => {
5792 servers_with_actions_requests.insert(
5793 new_server_name.clone(),
5794 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5795 move |_, _| {
5796 let name = new_server_name.clone();
5797 async move {
5798 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5799 lsp::CodeAction {
5800 title: format!("{name} code action"),
5801 ..lsp::CodeAction::default()
5802 },
5803 )]))
5804 }
5805 },
5806 ),
5807 );
5808 }
5809 "ESLintServer" => {
5810 servers_with_actions_requests.insert(
5811 new_server_name,
5812 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5813 |_, _| async move { Ok(None) },
5814 ),
5815 );
5816 }
5817 "NoActionsCapabilitiesServer" => {
5818 let _never_handled = new_server
5819 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5820 panic!(
5821 "Should not call for code actions server with no corresponding capabilities"
5822 )
5823 });
5824 }
5825 unexpected => panic!("Unexpected server name: {unexpected}"),
5826 }
5827 }
5828
5829 let code_actions_task = project.update(cx, |project, cx| {
5830 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5831 });
5832
5833 // cx.run_until_parked();
5834 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5835 |mut code_actions_request| async move {
5836 code_actions_request
5837 .next()
5838 .await
5839 .expect("All code actions requests should have been triggered")
5840 },
5841 ))
5842 .await;
5843 assert_eq!(
5844 vec!["TailwindServer code action", "TypeScriptServer code action"],
5845 code_actions_task
5846 .await
5847 .unwrap()
5848 .into_iter()
5849 .map(|code_action| code_action.lsp_action.title().to_owned())
5850 .sorted()
5851 .collect::<Vec<_>>(),
5852 "Should receive code actions responses from all related servers with hover capabilities"
5853 );
5854}
5855
5856#[gpui::test]
5857async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5858 init_test(cx);
5859
5860 let fs = FakeFs::new(cx.executor());
5861 fs.insert_tree(
5862 "/dir",
5863 json!({
5864 "a.rs": "let a = 1;",
5865 "b.rs": "let b = 2;",
5866 "c.rs": "let c = 2;",
5867 }),
5868 )
5869 .await;
5870
5871 let project = Project::test(
5872 fs,
5873 [
5874 "/dir/a.rs".as_ref(),
5875 "/dir/b.rs".as_ref(),
5876 "/dir/c.rs".as_ref(),
5877 ],
5878 cx,
5879 )
5880 .await;
5881
5882 // check the initial state and get the worktrees
5883 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5884 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5885 assert_eq!(worktrees.len(), 3);
5886
5887 let worktree_a = worktrees[0].read(cx);
5888 let worktree_b = worktrees[1].read(cx);
5889 let worktree_c = worktrees[2].read(cx);
5890
5891 // check they start in the right order
5892 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5893 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5894 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5895
5896 (
5897 worktrees[0].clone(),
5898 worktrees[1].clone(),
5899 worktrees[2].clone(),
5900 )
5901 });
5902
5903 // move first worktree to after the second
5904 // [a, b, c] -> [b, a, c]
5905 project
5906 .update(cx, |project, cx| {
5907 let first = worktree_a.read(cx);
5908 let second = worktree_b.read(cx);
5909 project.move_worktree(first.id(), second.id(), cx)
5910 })
5911 .expect("moving first after second");
5912
5913 // check the state after moving
5914 project.update(cx, |project, cx| {
5915 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5916 assert_eq!(worktrees.len(), 3);
5917
5918 let first = worktrees[0].read(cx);
5919 let second = worktrees[1].read(cx);
5920 let third = worktrees[2].read(cx);
5921
5922 // check they are now in the right order
5923 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5924 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5925 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5926 });
5927
5928 // move the second worktree to before the first
5929 // [b, a, c] -> [a, b, c]
5930 project
5931 .update(cx, |project, cx| {
5932 let second = worktree_a.read(cx);
5933 let first = worktree_b.read(cx);
5934 project.move_worktree(first.id(), second.id(), cx)
5935 })
5936 .expect("moving second before first");
5937
5938 // check the state after moving
5939 project.update(cx, |project, cx| {
5940 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5941 assert_eq!(worktrees.len(), 3);
5942
5943 let first = worktrees[0].read(cx);
5944 let second = worktrees[1].read(cx);
5945 let third = worktrees[2].read(cx);
5946
5947 // check they are now in the right order
5948 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5949 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5950 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5951 });
5952
5953 // move the second worktree to after the third
5954 // [a, b, c] -> [a, c, b]
5955 project
5956 .update(cx, |project, cx| {
5957 let second = worktree_b.read(cx);
5958 let third = worktree_c.read(cx);
5959 project.move_worktree(second.id(), third.id(), cx)
5960 })
5961 .expect("moving second after third");
5962
5963 // check the state after moving
5964 project.update(cx, |project, cx| {
5965 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5966 assert_eq!(worktrees.len(), 3);
5967
5968 let first = worktrees[0].read(cx);
5969 let second = worktrees[1].read(cx);
5970 let third = worktrees[2].read(cx);
5971
5972 // check they are now in the right order
5973 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5974 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5975 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5976 });
5977
5978 // move the third worktree to before the second
5979 // [a, c, b] -> [a, b, c]
5980 project
5981 .update(cx, |project, cx| {
5982 let third = worktree_c.read(cx);
5983 let second = worktree_b.read(cx);
5984 project.move_worktree(third.id(), second.id(), cx)
5985 })
5986 .expect("moving third before second");
5987
5988 // check the state after moving
5989 project.update(cx, |project, cx| {
5990 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5991 assert_eq!(worktrees.len(), 3);
5992
5993 let first = worktrees[0].read(cx);
5994 let second = worktrees[1].read(cx);
5995 let third = worktrees[2].read(cx);
5996
5997 // check they are now in the right order
5998 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5999 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6000 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6001 });
6002
6003 // move the first worktree to after the third
6004 // [a, b, c] -> [b, c, a]
6005 project
6006 .update(cx, |project, cx| {
6007 let first = worktree_a.read(cx);
6008 let third = worktree_c.read(cx);
6009 project.move_worktree(first.id(), third.id(), cx)
6010 })
6011 .expect("moving first after third");
6012
6013 // check the state after moving
6014 project.update(cx, |project, cx| {
6015 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6016 assert_eq!(worktrees.len(), 3);
6017
6018 let first = worktrees[0].read(cx);
6019 let second = worktrees[1].read(cx);
6020 let third = worktrees[2].read(cx);
6021
6022 // check they are now in the right order
6023 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6024 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6025 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6026 });
6027
6028 // move the third worktree to before the first
6029 // [b, c, a] -> [a, b, c]
6030 project
6031 .update(cx, |project, cx| {
6032 let third = worktree_a.read(cx);
6033 let first = worktree_b.read(cx);
6034 project.move_worktree(third.id(), first.id(), cx)
6035 })
6036 .expect("moving third before first");
6037
6038 // check the state after moving
6039 project.update(cx, |project, cx| {
6040 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6041 assert_eq!(worktrees.len(), 3);
6042
6043 let first = worktrees[0].read(cx);
6044 let second = worktrees[1].read(cx);
6045 let third = worktrees[2].read(cx);
6046
6047 // check they are now in the right order
6048 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6049 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6050 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6051 });
6052}
6053
6054#[gpui::test]
6055async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
6056 init_test(cx);
6057
6058 let staged_contents = r#"
6059 fn main() {
6060 println!("hello world");
6061 }
6062 "#
6063 .unindent();
6064 let file_contents = r#"
6065 // print goodbye
6066 fn main() {
6067 println!("goodbye world");
6068 }
6069 "#
6070 .unindent();
6071
6072 let fs = FakeFs::new(cx.background_executor.clone());
6073 fs.insert_tree(
6074 "/dir",
6075 json!({
6076 ".git": {},
6077 "src": {
6078 "main.rs": file_contents,
6079 }
6080 }),
6081 )
6082 .await;
6083
6084 fs.set_index_for_repo(
6085 Path::new("/dir/.git"),
6086 &[("src/main.rs".into(), staged_contents)],
6087 );
6088
6089 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6090
6091 let buffer = project
6092 .update(cx, |project, cx| {
6093 project.open_local_buffer("/dir/src/main.rs", cx)
6094 })
6095 .await
6096 .unwrap();
6097 let unstaged_diff = project
6098 .update(cx, |project, cx| {
6099 project.open_unstaged_diff(buffer.clone(), cx)
6100 })
6101 .await
6102 .unwrap();
6103
6104 cx.run_until_parked();
6105 unstaged_diff.update(cx, |unstaged_diff, cx| {
6106 let snapshot = buffer.read(cx).snapshot();
6107 assert_hunks(
6108 unstaged_diff.hunks(&snapshot, cx),
6109 &snapshot,
6110 &unstaged_diff.base_text_string().unwrap(),
6111 &[
6112 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
6113 (
6114 2..3,
6115 " println!(\"hello world\");\n",
6116 " println!(\"goodbye world\");\n",
6117 DiffHunkStatus::modified_none(),
6118 ),
6119 ],
6120 );
6121 });
6122
6123 let staged_contents = r#"
6124 // print goodbye
6125 fn main() {
6126 }
6127 "#
6128 .unindent();
6129
6130 fs.set_index_for_repo(
6131 Path::new("/dir/.git"),
6132 &[("src/main.rs".into(), staged_contents)],
6133 );
6134
6135 cx.run_until_parked();
6136 unstaged_diff.update(cx, |unstaged_diff, cx| {
6137 let snapshot = buffer.read(cx).snapshot();
6138 assert_hunks(
6139 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
6140 &snapshot,
6141 &unstaged_diff.base_text().text(),
6142 &[(
6143 2..3,
6144 "",
6145 " println!(\"goodbye world\");\n",
6146 DiffHunkStatus::added_none(),
6147 )],
6148 );
6149 });
6150}
6151
// Verifies the uncommitted diff (buffer contents vs. HEAD): hunk secondary
// (staged/unstaged) statuses when HEAD, index, and working copy all differ,
// reaction to HEAD moving, and how a file deleted from the working copy is
// presented.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three distinct versions of src/modification.rs: committed (HEAD),
    // staged (index), and on-disk (working copy).
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // deletion.rs exists in HEAD and the index but not in the working copy.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (the HEAD version) should pick up the buffer's
    // language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        // The comment line exists only in the working copy, so it has a
        // secondary (unstaged) hunk; the println change is already in the
        // index, so it has none.
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The old contents appear as a single deletion hunk; the deletion is not
    // yet staged because the file is still present in the index.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file (drop it from the index); the hunk's
    // secondary status should clear.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6329
// End-to-end test of staging individual diff hunks: the optimistic
// "pending" secondary statuses, the events the diff emits, rollback when
// the index write fails, and multiple staging operations issued
// back-to-back.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index start identical, so every hunk below begins unstaged.
    // The working copy deletes "zero" and modifies "two" and "four".
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged
    // (SecondaryHunkRemovalPending) before the index write completes.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It goes to the pending state as before.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6669
// Stages hunks while file-system events are paused, so that new staging
// operations race with the FS notifications for earlier ones. All staged
// hunks must still end up staged once events are flushed.
// NOTE(review): the seeds appear to be pinned to reproduce a specific
// scheduling order — confirm before changing them.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as test_staging_hunks: delete "zero", modify "two"/"four".
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. It stays in the pending state because the FS
    // event confirming the index write is withheld.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6862
// Stress-test staging/unstaging many hunks in rapid succession: a 500-line
// file where every fifth line differs from HEAD yields 100 hunks. Verifies
// that each `stage_or_unstage_hunks` call immediately reports an optimistic
// pending state (`SecondaryHunkRemovalPending` / `SecondaryHunkAdditionPending`)
// and that, once the executor settles, all hunks converge to the final
// staged (`NoSecondaryHunk`) or unstaged (`HasSecondaryHunk`) state.
#[gpui::test]
async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Replacement text for every fifth line (lines 0, 5, 10, ... 495).
    let different_lines = (0..500)
        .step_by(5)
        .map(|i| format!("diff {}\n", i))
        .collect::<Vec<String>>();
    let committed_contents = (0..500).map(|i| format!("{}\n", i)).collect::<String>();
    // Working copy: identical to HEAD except on the multiples of five.
    let file_contents = (0..500)
        .map(|i| {
            if i % 5 == 0 {
                different_lines[i / 5].clone()
            } else {
                format!("{}\n", i)
            }
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index both start at the committed contents, so every
    // modified line is an unstaged hunk.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // One expected hunk per modified line: row i..i+1, old text "i\n",
    // new text "diff i\n". The status field is mutated below as the test
    // walks through the stage/unstage lifecycle.
    let mut expected_hunks: Vec<(Range<u32>, String, String, DiffHunkStatus)> = (0..500)
        .step_by(5)
        .map(|i| {
            (
                i as u32..i as u32 + 1,
                format!("{}\n", i),
                different_lines[i / 5].clone(),
                DiffHunkStatus::modified(HasSecondaryHunk),
            )
        })
        .collect();

    // The hunks are initially unstaged
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkRemovalPending);
    }

    // Stage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        }

        // Before any background IO completes, all hunks must already show
        // the optimistic "staging in progress" state.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(NoSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkAdditionPending);
    }

    // Unstage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(false, &[hunk], &snapshot, true, cx);
        }

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks, again
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(HasSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });
}
7008
7009#[gpui::test]
7010async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7011 init_test(cx);
7012
7013 let committed_contents = r#"
7014 fn main() {
7015 println!("hello from HEAD");
7016 }
7017 "#
7018 .unindent();
7019 let file_contents = r#"
7020 fn main() {
7021 println!("hello from the working copy");
7022 }
7023 "#
7024 .unindent();
7025
7026 let fs = FakeFs::new(cx.background_executor.clone());
7027 fs.insert_tree(
7028 "/dir",
7029 json!({
7030 ".git": {},
7031 "src": {
7032 "main.rs": file_contents,
7033 }
7034 }),
7035 )
7036 .await;
7037
7038 fs.set_head_for_repo(
7039 Path::new("/dir/.git"),
7040 &[("src/main.rs".into(), committed_contents.clone())],
7041 );
7042 fs.set_index_for_repo(
7043 Path::new("/dir/.git"),
7044 &[("src/main.rs".into(), committed_contents.clone())],
7045 );
7046
7047 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7048
7049 let buffer = project
7050 .update(cx, |project, cx| {
7051 project.open_local_buffer("/dir/src/main.rs", cx)
7052 })
7053 .await
7054 .unwrap();
7055 let uncommitted_diff = project
7056 .update(cx, |project, cx| {
7057 project.open_uncommitted_diff(buffer.clone(), cx)
7058 })
7059 .await
7060 .unwrap();
7061
7062 cx.run_until_parked();
7063 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7064 let snapshot = buffer.read(cx).snapshot();
7065 assert_hunks(
7066 uncommitted_diff.hunks(&snapshot, cx),
7067 &snapshot,
7068 &uncommitted_diff.base_text_string().unwrap(),
7069 &[(
7070 1..2,
7071 " println!(\"hello from HEAD\");\n",
7072 " println!(\"hello from the working copy\");\n",
7073 DiffHunkStatus {
7074 kind: DiffHunkStatusKind::Modified,
7075 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7076 },
7077 )],
7078 );
7079 });
7080}
7081
// Maps project paths to their owning repository and repo-relative path,
// covering: a file outside any repo, a file in an outer repo, and a file in
// a nested repo (the nested repo must win). Also checks that deleting a
// repo's `.git` directory removes the association.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project path, expected (repo work dir, repo-relative path)).
        // `None` means the path belongs to no repository.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                // Nested repo takes precedence over the outer one.
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::from(repo_path))
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, Path::new(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer repo's `.git` directory should drop its mapping.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, Path::new("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
7170
7171#[gpui::test]
7172async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7173 init_test(cx);
7174 let fs = FakeFs::new(cx.background_executor.clone());
7175 fs.insert_tree(
7176 path!("/root"),
7177 json!({
7178 "home": {
7179 ".git": {},
7180 "project": {
7181 "a.txt": "A"
7182 },
7183 },
7184 }),
7185 )
7186 .await;
7187 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7188
7189 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7190 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7191 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7192 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7193 .await;
7194 tree.flush_fs_events(cx).await;
7195
7196 project.read_with(cx, |project, cx| {
7197 let containing = project
7198 .git_store()
7199 .read(cx)
7200 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7201 assert!(containing.is_none());
7202 });
7203
7204 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7205 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7206 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7207 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7208 .await;
7209 tree.flush_fs_events(cx).await;
7210
7211 project.read_with(cx, |project, cx| {
7212 let containing = project
7213 .git_store()
7214 .read(cx)
7215 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7216 assert_eq!(
7217 containing
7218 .unwrap()
7219 .0
7220 .read(cx)
7221 .work_directory_abs_path
7222 .as_ref(),
7223 Path::new(path!("/root/home"))
7224 );
7225 });
7226}
7227
// End-to-end status tracking against a real git repository on disk:
// verifies the initial modified/untracked/deleted statuses, that edits and
// commits update the cached status, and that deleting tracked vs. untracked
// files is distinguished correctly.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + real git below, so background threads must be allowed to block.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce the statuses announced in the tree literal above.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        // c.txt is unchanged, so it carries no status entry at all.
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modifying the previously-unchanged file should add a status for it.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the pending changes (and drop d.txt from the index) so the
    // slate is clean except for the untracked b.txt.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file and one untracked file.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7353
// Verifies two pieces of status post-processing: a nested git repository is
// excluded from the outer repo's statuses, and a file deleted in the index
// but present in HEAD and the working copy surfaces as a combined
// deleted-in-index / added-in-worktree ("DA") status.
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + real git; allow background threads to block.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // Two repositories exist (outer project + nested `sub`); pick the outer one.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
7415
// Opens a worktree rooted at a subfolder of a git repository and checks that
// statuses for paths inside the subfolder are still resolved against the
// repository root above the worktree — including when changes arrive only
// via FS events in the out-of-worktree `.git` directory.
#[gpui::test]
async fn test_repository_subfolder_git_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + real git; allow background threads to block.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "my-repo": {
            // .git folder will go here
            "a.txt": "a",
            "sub-folder-1": {
                "sub-folder-2": {
                    "c.txt": "cc",
                    "d": {
                        "e.txt": "eee"
                    }
                },
            }
        },
    }));

    // Paths are relative to the repository root, not the worktree root.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Set up git repository before creating the worktree.
    let git_repo_work_dir = root.path().join("my-repo");
    let repo = git_init(git_repo_work_dir.as_path());
    git_add(C_TXT, &repo);
    git_commit("Initial commit", &repo);

    // Open the worktree in subfolder
    let project_root = Path::new("my-repo/sub-folder-1/sub-folder-2");

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path().join(project_root).as_path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The detected work directory is the repo root, above the worktree.
        assert_eq!(
            repository.work_directory_abs_path.canonicalize().unwrap(),
            root.path().join("my-repo").canonicalize().unwrap()
        );

        // c.txt is committed (no status); e.txt is untracked.
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            FileStatus::Untracked
        );
    });

    // Now we simulate FS events, but ONLY in the .git folder that's outside
    // of out project root.
    // Meaning: we don't produce any FS events for files inside the project.
    git_add(E_TXT, &repo);
    git_commit("Second commit", &repo);
    tree.flush_fs_events_in_root_git_repository(cx).await;
    cx.executor().run_until_parked();

    // After the commit, both files are clean and carry no status.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(repository.status_for_path(&E_TXT.into()), None);
    });
}
7492
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Compiled out via `#[cfg(any())]` (always-false) until the flakiness is fixed.
// Intended coverage: a conflicted cherry-pick populates `merge_conflicts`
// with the conflicted path, and resolving + committing clears it again.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + real git; allow background threads to block.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a divergent commit on another branch, then cherry-pick it back
    // onto main after main has changed the same line — forcing a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is mid-cherry-pick and conflicted.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();
    // The conflicted path should be surfaced by the repository model.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Resolving the cherry-pick should empty the conflict set.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
7573
// Rewriting `.gitignore` flips which files are ignored, and the worktree +
// repository state must follow: the formerly-clean file becomes ignored,
// and the formerly-ignored file (once staged) shows as Added.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index contain the gitignore and a.xml; b.txt starts ignored.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Ignored-ness has swapped: a.xml is now ignored, b.txt shows as Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
7640
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
//
// Renaming a repository's work directory on disk: the repository model must
// report the new `work_directory_abs_path` while preserving the per-file
// statuses computed before the rename.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + real git; allow background threads to block.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` is never added (untracked).
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Baseline state before the rename.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // Same statuses, new work directory path.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
7720
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of the Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
//
// Broad file-status coverage against a real repository: initial statuses,
// edits, commits, reset/stash, ignore rules, file/directory deletion, and
// renaming a directory containing an untracked file.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real FS + real git; allow background threads to block.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // Files never added to the index are untracked.
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        // The committed files are clean again: no status entries.
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // a.txt was stashed (clean); b.txt left the index (untracked);
        // e.txt was edited (modified). BUILD_FILE is ignored and absent.
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules, then commit the new ignore file.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create an untracked file inside a fresh nested directory.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the untracked status should follow the
    // file to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
7908
// Repositories are only gathered from *visible* worktrees: adding an
// invisible (single-file, non-visible) worktree must not add its containing
// repository to the project's repository set.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    visible_worktree
        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;

    // Only the repo containing the visible worktree is reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add an invisible worktree for a file that lives in the outer repo
    // (`visible: false` is the second argument to find_or_create_worktree).
    let (invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    invisible_worktree
        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;

    // The repository set is unchanged: /root/dir1's repo was not added.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
7970
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Checks that git status and is_ignored state stay correct as files are
    // created in tracked, ancestor-ignored, and gitignored directories.
    init_test(cx);
    // Clear the default file-scan exclusions so all entries are visible to
    // the worktree scanner.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // Note: the outer .gitignore is an ancestor of the repo at /root/tree and
    // ignores "ancestor-ignored-file1/2"; the repo's own .gitignore ignores
    // "ignored-dir".
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    // Start with .gitignore and tracked-file1 committed and staged unmodified.
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force entries inside the ignored directory to be loaded — presumably
    // ignored dirs are not scanned eagerly (TODO confirm against worktree).
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: nothing has a status (everything matches HEAD/index),
    // and only the gitignored file is marked ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new file and stage it (present in index, absent from HEAD),
    // plus new files in the ancestor-ignored and gitignored locations.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    // The staged-but-not-committed file reports Added; the ignored files
    // report no status; .git itself is treated as ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8105
8106#[gpui::test]
8107async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
8108 init_test(cx);
8109 let fs = FakeFs::new(cx.background_executor.clone());
8110 fs.insert_tree(
8111 path!("/root"),
8112 json!({
8113 "project": {
8114 ".git": {},
8115 "child1": {
8116 "a.txt": "A",
8117 },
8118 "child2": {
8119 "b.txt": "B",
8120 }
8121 }
8122 }),
8123 )
8124 .await;
8125
8126 let project = Project::test(
8127 fs.clone(),
8128 [
8129 path!("/root/project/child1").as_ref(),
8130 path!("/root/project/child2").as_ref(),
8131 ],
8132 cx,
8133 )
8134 .await;
8135
8136 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8137 tree.flush_fs_events(cx).await;
8138 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
8139 .await;
8140 cx.executor().run_until_parked();
8141
8142 let repos = project.read_with(cx, |project, cx| {
8143 project
8144 .repositories(cx)
8145 .values()
8146 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
8147 .collect::<Vec<_>>()
8148 });
8149 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
8150}
8151
8152async fn search(
8153 project: &Entity<Project>,
8154 query: SearchQuery,
8155 cx: &mut gpui::TestAppContext,
8156) -> Result<HashMap<String, Vec<Range<usize>>>> {
8157 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8158 let mut results = HashMap::default();
8159 while let Ok(search_result) = search_rx.recv().await {
8160 match search_result {
8161 SearchResult::Buffer { buffer, ranges } => {
8162 results.entry(buffer).or_insert(ranges);
8163 }
8164 SearchResult::LimitReached => {}
8165 }
8166 }
8167 Ok(results
8168 .into_iter()
8169 .map(|(buffer, ranges)| {
8170 buffer.update(cx, |buffer, cx| {
8171 let path = buffer
8172 .file()
8173 .unwrap()
8174 .full_path(cx)
8175 .to_string_lossy()
8176 .to_string();
8177 let ranges = ranges
8178 .into_iter()
8179 .map(|range| range.to_offset(buffer))
8180 .collect::<Vec<_>>();
8181 (path, ranges)
8182 })
8183 })
8184 .collect())
8185}
8186
8187pub fn init_test(cx: &mut gpui::TestAppContext) {
8188 if std::env::var("RUST_LOG").is_ok() {
8189 env_logger::try_init().ok();
8190 }
8191
8192 cx.update(|cx| {
8193 let settings_store = SettingsStore::test(cx);
8194 cx.set_global(settings_store);
8195 release_channel::init(SemanticVersion::default(), cx);
8196 language::init(cx);
8197 Project::init_settings(cx);
8198 });
8199}
8200
8201fn json_lang() -> Arc<Language> {
8202 Arc::new(Language::new(
8203 LanguageConfig {
8204 name: "JSON".into(),
8205 matcher: LanguageMatcher {
8206 path_suffixes: vec!["json".to_string()],
8207 ..Default::default()
8208 },
8209 ..Default::default()
8210 },
8211 None,
8212 ))
8213}
8214
8215fn js_lang() -> Arc<Language> {
8216 Arc::new(Language::new(
8217 LanguageConfig {
8218 name: "JavaScript".into(),
8219 matcher: LanguageMatcher {
8220 path_suffixes: vec!["js".to_string()],
8221 ..Default::default()
8222 },
8223 ..Default::default()
8224 },
8225 None,
8226 ))
8227}
8228
8229fn rust_lang() -> Arc<Language> {
8230 Arc::new(Language::new(
8231 LanguageConfig {
8232 name: "Rust".into(),
8233 matcher: LanguageMatcher {
8234 path_suffixes: vec!["rs".to_string()],
8235 ..Default::default()
8236 },
8237 ..Default::default()
8238 },
8239 Some(tree_sitter_rust::LANGUAGE.into()),
8240 ))
8241}
8242
8243fn typescript_lang() -> Arc<Language> {
8244 Arc::new(Language::new(
8245 LanguageConfig {
8246 name: "TypeScript".into(),
8247 matcher: LanguageMatcher {
8248 path_suffixes: vec!["ts".to_string()],
8249 ..Default::default()
8250 },
8251 ..Default::default()
8252 },
8253 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8254 ))
8255}
8256
8257fn tsx_lang() -> Arc<Language> {
8258 Arc::new(Language::new(
8259 LanguageConfig {
8260 name: "tsx".into(),
8261 matcher: LanguageMatcher {
8262 path_suffixes: vec!["tsx".to_string()],
8263 ..Default::default()
8264 },
8265 ..Default::default()
8266 },
8267 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8268 ))
8269}
8270
8271fn get_all_tasks(
8272 project: &Entity<Project>,
8273 task_contexts: &TaskContexts,
8274 cx: &mut App,
8275) -> Vec<(TaskSourceKind, ResolvedTask)> {
8276 let (mut old, new) = project.update(cx, |project, cx| {
8277 project
8278 .task_store
8279 .read(cx)
8280 .task_inventory()
8281 .unwrap()
8282 .read(cx)
8283 .used_and_current_resolved_tasks(task_contexts, cx)
8284 });
8285 old.extend(new);
8286 old
8287}
8288
8289#[track_caller]
8290fn assert_entry_git_state(
8291 tree: &Worktree,
8292 repository: &Repository,
8293 path: &str,
8294 index_status: Option<StatusCode>,
8295 is_ignored: bool,
8296) {
8297 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
8298 let entry = tree
8299 .entry_for_path(path)
8300 .unwrap_or_else(|| panic!("entry {path} not found"));
8301 let status = repository
8302 .status_for_path(&path.into())
8303 .map(|entry| entry.status);
8304 let expected = index_status.map(|index_status| {
8305 TrackedStatus {
8306 index_status,
8307 worktree_status: StatusCode::Unmodified,
8308 }
8309 .into()
8310 });
8311 assert_eq!(
8312 status, expected,
8313 "expected {path} to have git status: {expected:?}"
8314 );
8315 assert_eq!(
8316 entry.is_ignored, is_ignored,
8317 "expected {path} to have is_ignored: {is_ignored}"
8318 );
8319}
8320
8321#[track_caller]
8322fn git_init(path: &Path) -> git2::Repository {
8323 let mut init_opts = RepositoryInitOptions::new();
8324 init_opts.initial_head("main");
8325 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
8326}
8327
8328#[track_caller]
8329fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
8330 let path = path.as_ref();
8331 let mut index = repo.index().expect("Failed to get index");
8332 index.add_path(path).expect("Failed to add file");
8333 index.write().expect("Failed to write index");
8334}
8335
8336#[track_caller]
8337fn git_remove_index(path: &Path, repo: &git2::Repository) {
8338 let mut index = repo.index().expect("Failed to get index");
8339 index.remove_path(path).expect("Failed to add file");
8340 index.write().expect("Failed to write index");
8341}
8342
8343#[track_caller]
8344fn git_commit(msg: &'static str, repo: &git2::Repository) {
8345 use git2::Signature;
8346
8347 let signature = Signature::now("test", "test@zed.dev").unwrap();
8348 let oid = repo.index().unwrap().write_tree().unwrap();
8349 let tree = repo.find_tree(oid).unwrap();
8350 if let Ok(head) = repo.head() {
8351 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
8352
8353 let parent_commit = parent_obj.as_commit().unwrap();
8354
8355 repo.commit(
8356 Some("HEAD"),
8357 &signature,
8358 &signature,
8359 msg,
8360 &tree,
8361 &[parent_commit],
8362 )
8363 .expect("Failed to commit with parent");
8364 } else {
8365 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
8366 .expect("Failed to commit");
8367 }
8368}
8369
// Cherry-picks `commit` onto the current HEAD with default options.
// (Compiled out via cfg(any()); kept for future use.)
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None)
        .expect("Failed to cherrypick");
}
8375
8376#[track_caller]
8377fn git_stash(repo: &mut git2::Repository) {
8378 use git2::Signature;
8379
8380 let signature = Signature::now("test", "test@zed.dev").unwrap();
8381 repo.stash_save(&signature, "N/A", None)
8382 .expect("Failed to stash");
8383}
8384
8385#[track_caller]
8386fn git_reset(offset: usize, repo: &git2::Repository) {
8387 let head = repo.head().expect("Couldn't get repo head");
8388 let object = head.peel(git2::ObjectType::Commit).unwrap();
8389 let commit = object.as_commit().unwrap();
8390 let new_head = commit
8391 .parents()
8392 .inspect(|parnet| {
8393 parnet.message();
8394 })
8395 .nth(offset)
8396 .expect("Not enough history");
8397 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
8398 .expect("Could not reset");
8399}
8400
// Creates a branch named `name` pointing at the current HEAD commit.
// (Compiled out via cfg(any()); kept for future use.)
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-paste error: the old message said "Failed to commit".
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
8411
// Points HEAD at `name` and updates the working tree to match.
// (Compiled out via cfg(any()); kept for future use.)
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
8418
// Snapshots the repository's status as a path -> git2::Status map.
// (Compiled out via cfg(any()); kept for future use.)
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}