1#![allow(clippy::format_collect)]
2
3use crate::{task_inventory::TaskContexts, task_store::TaskSettingsLocation, Event, *};
4use buffer_diff::{
5 assert_hunks, BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
6};
7use fs::FakeFs;
8use futures::{future, StreamExt};
9use git::repository::RepoPath;
10use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
11use http_client::Url;
12use language::{
13 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
14 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
15 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
16 OffsetRangeExt, Point, ToPoint,
17};
18use lsp::{
19 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
20 NumberOrString, TextDocumentEdit, WillRenameFiles,
21};
22use parking_lot::Mutex;
23use paths::tasks_file;
24use pretty_assertions::{assert_eq, assert_matches};
25use serde_json::json;
26#[cfg(not(windows))]
27use std::os;
28use std::{mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
29use task::{ResolvedTask, TaskContext};
30use unindent::Unindent as _;
31use util::{
32 assert_set_eq, path,
33 paths::PathMatcher,
34 separator,
35 test::{marked_text_offsets, TempTree},
36 uri, TryFutureExt as _,
37};
38use worktree::WorktreeModelHandle as _;
39
40#[gpui::test]
41async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
42 cx.executor().allow_parking();
43
44 let (tx, mut rx) = futures::channel::mpsc::unbounded();
45 let _thread = std::thread::spawn(move || {
46 #[cfg(not(target_os = "windows"))]
47 std::fs::metadata("/tmp").unwrap();
48 #[cfg(target_os = "windows")]
49 std::fs::metadata("C:/Windows").unwrap();
50 std::thread::sleep(Duration::from_millis(1000));
51 tx.unbounded_send(1).unwrap();
52 });
53 rx.next().await.unwrap();
54}
55
56#[gpui::test]
57async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
58 cx.executor().allow_parking();
59
60 let io_task = smol::unblock(move || {
61 println!("sleeping on thread {:?}", std::thread::current().id());
62 std::thread::sleep(Duration::from_millis(10));
63 1
64 });
65
66 let task = cx.foreground_executor().spawn(async move {
67 io_task.await;
68 });
69
70 task.await;
71}
72
73#[cfg(not(windows))]
74#[gpui::test]
75async fn test_symlinks(cx: &mut gpui::TestAppContext) {
76 init_test(cx);
77 cx.executor().allow_parking();
78
79 let dir = TempTree::new(json!({
80 "root": {
81 "apple": "",
82 "banana": {
83 "carrot": {
84 "date": "",
85 "endive": "",
86 }
87 },
88 "fennel": {
89 "grape": "",
90 }
91 }
92 }));
93
94 let root_link_path = dir.path().join("root_link");
95 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
96 os::unix::fs::symlink(
97 dir.path().join("root/fennel"),
98 dir.path().join("root/finnochio"),
99 )
100 .unwrap();
101
102 let project = Project::test(
103 Arc::new(RealFs::new(None, cx.executor())),
104 [root_link_path.as_ref()],
105 cx,
106 )
107 .await;
108
109 project.update(cx, |project, cx| {
110 let tree = project.worktrees(cx).next().unwrap().read(cx);
111 assert_eq!(tree.file_count(), 5);
112 assert_eq!(
113 tree.inode_for_path("fennel/grape"),
114 tree.inode_for_path("finnochio/grape")
115 );
116 });
117}
118
// Verifies .editorconfig support: .editorconfig settings take precedence over
// .zed/settings.json, nested .editorconfig files override parent ones, and
// globs only apply to matching file types.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp dir into a FakeFs so the test stays deterministic.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path,
        // going through language detection so per-language globs apply.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings.json for matching files.
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // The .editorconfig in b/ overrides the .editorconfig in the root.
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set for *.js, so "tab_width" is used.
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json is not matched by the .editorconfig glob "*.rs",
        // so it keeps the .zed/settings.json tab_size of 8.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
208
// Verifies per-directory .zed settings and tasks: nested settings override the
// root ones, nested worktree tasks are surfaced alongside root tasks, and a
// recently-scheduled task plus global (file-based) tasks affect ordering.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let settings and tasks files be scanned before querying anything.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // The source kind for tasks declared in the worktree root's ".zed" dir.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            // a/ inherits the root settings; b/ has its own override.
            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both the nested (b/.zed) and root (.zed) tasks are visible.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as recently scheduled, and register a global
    // (file-based) task, then re-query.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                    settings::TaskKind::Script,
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task is promoted to the front; the global task
    // (with its env) is appended after the worktree ones.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
414
415#[gpui::test]
416async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
417 init_test(cx);
418 TaskStore::init(None);
419
420 let fs = FakeFs::new(cx.executor());
421 fs.insert_tree(
422 path!("/dir"),
423 json!({
424 ".zed": {
425 "tasks.json": r#"[{
426 "label": "test worktree root",
427 "command": "echo $ZED_WORKTREE_ROOT"
428 }]"#,
429 },
430 "a": {
431 "a.rs": "fn a() {\n A\n}"
432 },
433 }),
434 )
435 .await;
436
437 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
438 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
439
440 cx.executor().run_until_parked();
441 let worktree_id = cx.update(|cx| {
442 project.update(cx, |project, cx| {
443 project.worktrees(cx).next().unwrap().read(cx).id()
444 })
445 });
446
447 let active_non_worktree_item_tasks = cx.update(|cx| {
448 get_all_tasks(
449 &project,
450 &TaskContexts {
451 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
452 active_worktree_context: None,
453 other_worktree_contexts: Vec::new(),
454 },
455 cx,
456 )
457 });
458 assert!(
459 active_non_worktree_item_tasks.is_empty(),
460 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
461 );
462
463 let active_worktree_tasks = cx.update(|cx| {
464 get_all_tasks(
465 &project,
466 &TaskContexts {
467 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
468 active_worktree_context: Some((worktree_id, {
469 let mut worktree_context = TaskContext::default();
470 worktree_context
471 .task_variables
472 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
473 worktree_context
474 })),
475 other_worktree_contexts: Vec::new(),
476 },
477 cx,
478 )
479 });
480 assert_eq!(
481 active_worktree_tasks
482 .into_iter()
483 .map(|(source_kind, task)| {
484 let resolved = task.resolved.unwrap();
485 (source_kind, resolved.command)
486 })
487 .collect::<Vec<_>>(),
488 vec![(
489 TaskSourceKind::Worktree {
490 id: worktree_id,
491 directory_in_worktree: PathBuf::from(separator!(".zed")),
492 id_base: if cfg!(windows) {
493 "local worktree tasks from directory \".zed\"".into()
494 } else {
495 "local worktree tasks from directory \".zed\"".into()
496 },
497 },
498 "echo /dir".to_string(),
499 )]
500 );
501}
502
// End-to-end exercise of language-server lifecycle management: servers start
// lazily when a matching buffer opens, buffers are configured from server
// capabilities, edits/saves/renames are routed only to servers matching each
// buffer's language, and restarting servers reopens the relevant documents.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register two fake servers with distinct completion triggers so we can
    // tell which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so no triggers were configured.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    // (The TOML edit below should produce no LSP traffic; the next change
    // notification received must be for the Rust edit.)
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // A rename is modeled as close-old + open-new on the same server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can check it is cleared when the language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the
    // replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two DidOpen notifications is unspecified, hence the set
    // comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
904
// Verifies workspace/didChangeWatchedFiles support: ignored directories are
// only scanned once a server registers a watcher inside them, and only FS
// mutations matching the registered glob patterns are reported to the server.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for measuring how many extra directory reads the watcher
    // registration below triggers.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Sort by URI so assertions below are order-independent.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering the watchers alone must not produce any change events, and
    // scanning target/y (plus its ancestors) accounts for exactly 4 reads.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1104
#[gpui::test]
// Verifies that when each file is opened as its own single-file worktree,
// diagnostics published per-URI are routed to the correct buffer: errors for
// `a.rs` must show up only in `buffer_a`, warnings for `b.rs` only in `buffer_b`.
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Passing two file paths (not a directory) creates two single-file worktrees.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one diagnostic per file, with different severities so the
    // assertions below can tell the two apart.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // `a.rs` carries only the ERROR on the identifier `a`.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    // `b.rs` carries only the WARNING on the identifier `b`.
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1206
1207#[gpui::test]
1208async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1209 init_test(cx);
1210
1211 let fs = FakeFs::new(cx.executor());
1212 fs.insert_tree(
1213 path!("/root"),
1214 json!({
1215 "dir": {
1216 ".git": {
1217 "HEAD": "ref: refs/heads/main",
1218 },
1219 ".gitignore": "b.rs",
1220 "a.rs": "let a = 1;",
1221 "b.rs": "let b = 2;",
1222 },
1223 "other.rs": "let b = c;"
1224 }),
1225 )
1226 .await;
1227
1228 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1229 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1230 let (worktree, _) = project
1231 .update(cx, |project, cx| {
1232 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1233 })
1234 .await
1235 .unwrap();
1236 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1237
1238 let (worktree, _) = project
1239 .update(cx, |project, cx| {
1240 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1241 })
1242 .await
1243 .unwrap();
1244 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1245
1246 let server_id = LanguageServerId(0);
1247 lsp_store.update(cx, |lsp_store, cx| {
1248 lsp_store
1249 .update_diagnostics(
1250 server_id,
1251 lsp::PublishDiagnosticsParams {
1252 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1253 version: None,
1254 diagnostics: vec![lsp::Diagnostic {
1255 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1256 severity: Some(lsp::DiagnosticSeverity::ERROR),
1257 message: "unused variable 'b'".to_string(),
1258 ..Default::default()
1259 }],
1260 },
1261 &[],
1262 cx,
1263 )
1264 .unwrap();
1265 lsp_store
1266 .update_diagnostics(
1267 server_id,
1268 lsp::PublishDiagnosticsParams {
1269 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1270 version: None,
1271 diagnostics: vec![lsp::Diagnostic {
1272 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1273 severity: Some(lsp::DiagnosticSeverity::ERROR),
1274 message: "unknown variable 'c'".to_string(),
1275 ..Default::default()
1276 }],
1277 },
1278 &[],
1279 cx,
1280 )
1281 .unwrap();
1282 });
1283
1284 let main_ignored_buffer = project
1285 .update(cx, |project, cx| {
1286 project.open_buffer((main_worktree_id, "b.rs"), cx)
1287 })
1288 .await
1289 .unwrap();
1290 main_ignored_buffer.update(cx, |buffer, _| {
1291 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1292 assert_eq!(
1293 chunks
1294 .iter()
1295 .map(|(s, d)| (s.as_str(), *d))
1296 .collect::<Vec<_>>(),
1297 &[
1298 ("let ", None),
1299 ("b", Some(DiagnosticSeverity::ERROR)),
1300 (" = 2;", None),
1301 ],
1302 "Gigitnored buffers should still get in-buffer diagnostics",
1303 );
1304 });
1305 let other_buffer = project
1306 .update(cx, |project, cx| {
1307 project.open_buffer((other_worktree_id, ""), cx)
1308 })
1309 .await
1310 .unwrap();
1311 other_buffer.update(cx, |buffer, _| {
1312 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1313 assert_eq!(
1314 chunks
1315 .iter()
1316 .map(|(s, d)| (s.as_str(), *d))
1317 .collect::<Vec<_>>(),
1318 &[
1319 ("let b = ", None),
1320 ("c", Some(DiagnosticSeverity::ERROR)),
1321 (";", None),
1322 ],
1323 "Buffers from hidden projects should still get in-buffer diagnostics"
1324 );
1325 });
1326
1327 project.update(cx, |project, cx| {
1328 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1329 assert_eq!(
1330 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1331 vec![(
1332 ProjectPath {
1333 worktree_id: main_worktree_id,
1334 path: Arc::from(Path::new("b.rs")),
1335 },
1336 server_id,
1337 DiagnosticSummary {
1338 error_count: 1,
1339 warning_count: 0,
1340 }
1341 )]
1342 );
1343 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1344 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1345 });
1346}
1347
#[gpui::test]
// Exercises the disk-based-diagnostics lifecycle events: starting the server,
// a progress token opening/closing a `DiskBasedDiagnosticsStarted/Finished`
// pair, a `DiagnosticsUpdated` event per published batch, and deduplication of
// a repeated empty publish (no second update event).
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter's progress token is what marks this server's work as
    // "disk-based diagnostics" below.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token emits a
    // `DiskBasedDiagnosticsStarted` event (after the inlay-hint refresh).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // The published diagnostic is visible in the (not-yet-open) buffer once it
    // is opened.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // The second empty publish is a no-op, so no further event is pending.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1483
#[gpui::test]
// Restarts a language server while its disk-based diagnostics progress is
// still open, and verifies that the replacement server (id 1) takes over the
// started/finished event pair — the old server's never-ended progress does not
// leave diagnostics stuck in the "running" state.
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server (id 1) is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1570
#[gpui::test]
// Verifies that restarting a language server clears the diagnostics it had
// published — both the in-buffer entries and the project-wide summary counts.
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is reflected in the buffer and the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1651
#[gpui::test]
// A server reporting diagnostics against an unknown (far-future) buffer
// version must not corrupt state: after a restart, the buffer is re-opened
// with the server at version 0 as if nothing happened.
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The restarted server re-opens the document starting from version 0.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1691
#[gpui::test]
// Verifies that cancelling language-server work for a buffer sends a
// `WorkDoneProgressCancel` only for progress marked `cancellable: true`,
// leaving non-cancellable work untouched.
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First token: non-cancellable work — must NOT receive a cancel request.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // Second token: cancellable work — the one we expect to be cancelled.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Exactly the cancellable token is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1756
#[gpui::test]
// Verifies that toggling `enable_language_server` per-language starts/stops
// exactly that language's server: disabling Rust exits only the Rust server;
// re-enabling Rust while disabling JavaScript restarts the former and exits
// the latter.
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer of each language starts one server of each kind.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The fresh Rust server re-opens the Rust buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...and the JavaScript server exits.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1874
#[gpui::test(iterations = 3)]
// Verifies that diagnostics published against an older buffer version are
// transformed (anchored and translated) through subsequent edits: ranges move
// with the text, overlapping diagnostics highlight correctly, and publishes
// referencing stale versions are reconciled against the current content.
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Original rows 1-2 are now rows 3-4 after the "\n\n" prepend.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider WARNING sorts before the nested ERROR; the ERROR's
        // highlight takes precedence over the region they share.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Ranges reflect the latest edits; entries come back sorted by position
        // even though they were published out of order.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2160
#[gpui::test]
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// extended forward to cover the following character, except at end-of-line
// where it is extended backward to cover the preceding character.
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Insert two zero-width diagnostics: one mid-line (before `;`), one at
    // the very end of a line.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2233
// Two different language servers each report an error for the same file and
// the same range; the diagnostic summary must count both entries instead of
// deduplicating across servers.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Error reported by the first server (id 0).
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // Error reported by a second, distinct server (id 1) for the same range.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors contribute to the summary.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2290
// The language server computes edits against the document version it saw at
// open time, while the buffer is edited concurrently. `edits_from_lsp` is
// given that past version number and must remap the server's edits so they
// still land in the intended places in the newer buffer contents.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    // Capture the document version the server observed on open; the LSP edits
    // below are interpreted relative to this version.
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f1();
            }
            fn b() {
                // inside second function f2();
            }
            fn c() {
                f3();
            }
            "
            .unindent()
        );
    });

    // The edit positions below are expressed in coordinates of the ORIGINAL
    // (pre-edit) document, as a real server would have computed them.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the remapped edits must preserve the user's concurrent edits
    // while still performing the server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            // above first function
            fn a() {
                // inside first function
                f10();
            }
            fn b() {
                // inside second function f200();
            }
            fn c() {
                f4000();
            }
            "
            .unindent()
        );
    });
}
2445
// A server sends a small logical change expressed as a huge diff (delete and
// re-insert most of the file). `edits_from_lsp` must collapse this into a
// minimal set of edits so unchanged text is not touched.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large delete-and-reinsert diff collapses to just two edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2556
// Servers may send edits out of order, with inverted ranges, or with
// positions past the end of the document. `edits_from_lsp` must normalize all
// of these and still produce the same minimal edit set as the well-formed
// case in the test above.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start comes after end.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite malformed input, the result is the same minimal edit set.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2663
2664fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2665 buffer: &Buffer,
2666 range: Range<T>,
2667) -> Vec<(String, Option<DiagnosticSeverity>)> {
2668 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2669 for chunk in buffer.snapshot().chunks(range, true) {
2670 if chunks.last().map_or(false, |prev_chunk| {
2671 prev_chunk.1 == chunk.diagnostic_severity
2672 }) {
2673 chunks.last_mut().unwrap().0.push_str(chunk.text);
2674 } else {
2675 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2676 }
2677 }
2678 chunks
2679}
2680
// Go-to-definition into a file outside the project: the target file is
// opened in a new invisible worktree, which is released again once the last
// reference to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside of it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Point at the symbol `A` in the out-of-project file a.rs.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs is now present as an extra, non-visible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2778
// Completion items that carry no explicit text edit: the replaced range must
// be inferred from the text around the cursor, and `insert_text` (when
// present) must win over `label` as the inserted text.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completion at the end of the word "fqn".
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // `insert_text` is used, and the old range covers the 3-char word "fqn".
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completion inside a string literal, with no `insert_text`.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // The label is inserted, replacing the 3-char word "cmp" before the quote.
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2872
// Completion text containing bare carriage returns ("\r") or CRLF ("\r\n")
// must be normalized to plain "\n" line endings before insertion.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mix of a bare "\r" and a "\r\n" in the inserted text.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    assert_eq!(completions.len(), 1);
    // Both forms of carriage return are normalized to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2935
// A code action whose resolution yields a command (no edits): applying it
// must execute the command, and buffer edits that the server pushes back via
// `workspace/applyEdit` during command execution must be captured in the
// returned project transaction (and be undoable as one unit).
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3076
// Saving a modified buffer writes its contents back to the filesystem; the
// on-disk text must match the buffer afterwards.
#[gpui::test(iterations = 10)]
async fn test_save_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "the old contents");
        // Large (~160 KB) insertion so the save exercises a multi-chunk write.
        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
    });

    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();

    // Strip any CRs so the comparison is line-ending agnostic.
    let new_text = fs
        .load(Path::new(path!("/dir/file1")))
        .await
        .unwrap()
        .replace("\r\n", "\n");
    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
}
3112
3113#[gpui::test(iterations = 30)]
3114async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3115 init_test(cx);
3116
3117 let fs = FakeFs::new(cx.executor().clone());
3118 fs.insert_tree(
3119 path!("/dir"),
3120 json!({
3121 "file1": "the original contents",
3122 }),
3123 )
3124 .await;
3125
3126 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3127 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3128 let buffer = project
3129 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3130 .await
3131 .unwrap();
3132
3133 // Simulate buffer diffs being slow, so that they don't complete before
3134 // the next file change occurs.
3135 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3136
3137 // Change the buffer's file on disk, and then wait for the file change
3138 // to be detected by the worktree, so that the buffer starts reloading.
3139 fs.save(
3140 path!("/dir/file1").as_ref(),
3141 &"the first contents".into(),
3142 Default::default(),
3143 )
3144 .await
3145 .unwrap();
3146 worktree.next_event(cx).await;
3147
3148 // Change the buffer's file again. Depending on the random seed, the
3149 // previous file change may still be in progress.
3150 fs.save(
3151 path!("/dir/file1").as_ref(),
3152 &"the second contents".into(),
3153 Default::default(),
3154 )
3155 .await
3156 .unwrap();
3157 worktree.next_event(cx).await;
3158
3159 cx.executor().run_until_parked();
3160 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3161 buffer.read_with(cx, |buffer, _| {
3162 assert_eq!(buffer.text(), on_disk_text);
3163 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3164 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3165 });
3166}
3167
3168#[gpui::test(iterations = 30)]
3169async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3170 init_test(cx);
3171
3172 let fs = FakeFs::new(cx.executor().clone());
3173 fs.insert_tree(
3174 path!("/dir"),
3175 json!({
3176 "file1": "the original contents",
3177 }),
3178 )
3179 .await;
3180
3181 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3182 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3183 let buffer = project
3184 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3185 .await
3186 .unwrap();
3187
3188 // Simulate buffer diffs being slow, so that they don't complete before
3189 // the next file change occurs.
3190 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3191
3192 // Change the buffer's file on disk, and then wait for the file change
3193 // to be detected by the worktree, so that the buffer starts reloading.
3194 fs.save(
3195 path!("/dir/file1").as_ref(),
3196 &"the first contents".into(),
3197 Default::default(),
3198 )
3199 .await
3200 .unwrap();
3201 worktree.next_event(cx).await;
3202
3203 cx.executor()
3204 .spawn(cx.executor().simulate_random_delay())
3205 .await;
3206
3207 // Perform a noop edit, causing the buffer's version to increase.
3208 buffer.update(cx, |buffer, cx| {
3209 buffer.edit([(0..0, " ")], None, cx);
3210 buffer.undo(cx);
3211 });
3212
3213 cx.executor().run_until_parked();
3214 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3215 buffer.read_with(cx, |buffer, _| {
3216 let buffer_text = buffer.text();
3217 if buffer_text == on_disk_text {
3218 assert!(
3219 !buffer.is_dirty() && !buffer.has_conflict(),
3220 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3221 );
3222 }
3223 // If the file change occurred while the buffer was processing the first
3224 // change, the buffer will be in a conflicting state.
3225 else {
3226 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3227 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3228 }
3229 });
3230}
3231
// Same save round-trip as `test_save_file`, but with the worktree rooted at
// the file itself (a single-file worktree) rather than its parent directory.
#[gpui::test]
async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    // The worktree root is the file itself, not "/dir".
    let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
    });

    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();

    // Strip any CRs so the comparison is line-ending agnostic.
    let new_text = fs
        .load(Path::new(path!("/dir/file1")))
        .await
        .unwrap()
        .replace("\r\n", "\n");
    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
}
3266
// Save-as on an untitled buffer: writes the file, clears the dirty flag,
// re-detects the language from the new file extension, and registers the
// buffer so that reopening the path yields the same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts as dirty Plain Text once edited.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The ".rs" extension causes the language to switch to Rust.
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must return the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3318
// End-to-end check of worktree rescanning: files and directories are
// renamed/deleted on the real filesystem, and we verify that entry ids
// survive the renames, open buffers follow their files, and a remote
// worktree replica converges after replaying the streamed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real filesystem I/O below (TempTree + RealFs), so the deterministic
    // executor must be allowed to block.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp tree's root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Returns the stable worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update message the local worktree emits so they can be
    // replayed into the remote replica further below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    // Sanity check: nothing has been edited, so no buffer is dirty yet.
    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree now reflects the new directory layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids are preserved across renames, including files moved
    // implicitly by renaming their parent directory.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files' new paths; the buffer whose file was
    // deleted keeps its old path but reports a Deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3484
3485#[gpui::test(iterations = 10)]
3486async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3487 init_test(cx);
3488
3489 let fs = FakeFs::new(cx.executor());
3490 fs.insert_tree(
3491 path!("/dir"),
3492 json!({
3493 "a": {
3494 "file1": "",
3495 }
3496 }),
3497 )
3498 .await;
3499
3500 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3501 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3502 let tree_id = tree.update(cx, |tree, _| tree.id());
3503
3504 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3505 project.update(cx, |project, cx| {
3506 let tree = project.worktrees(cx).next().unwrap();
3507 tree.read(cx)
3508 .entry_for_path(path)
3509 .unwrap_or_else(|| panic!("no entry for path {}", path))
3510 .id
3511 })
3512 };
3513
3514 let dir_id = id_for_path("a", cx);
3515 let file_id = id_for_path("a/file1", cx);
3516 let buffer = project
3517 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3518 .await
3519 .unwrap();
3520 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3521
3522 project
3523 .update(cx, |project, cx| {
3524 project.rename_entry(dir_id, Path::new("b"), cx)
3525 })
3526 .unwrap()
3527 .await
3528 .to_included()
3529 .unwrap();
3530 cx.executor().run_until_parked();
3531
3532 assert_eq!(id_for_path("b", cx), dir_id);
3533 assert_eq!(id_for_path("b/file1", cx), file_id);
3534 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3535}
3536
3537#[gpui::test]
3538async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3539 init_test(cx);
3540
3541 let fs = FakeFs::new(cx.executor());
3542 fs.insert_tree(
3543 "/dir",
3544 json!({
3545 "a.txt": "a-contents",
3546 "b.txt": "b-contents",
3547 }),
3548 )
3549 .await;
3550
3551 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3552
3553 // Spawn multiple tasks to open paths, repeating some paths.
3554 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3555 (
3556 p.open_local_buffer("/dir/a.txt", cx),
3557 p.open_local_buffer("/dir/b.txt", cx),
3558 p.open_local_buffer("/dir/a.txt", cx),
3559 )
3560 });
3561
3562 let buffer_a_1 = buffer_a_1.await.unwrap();
3563 let buffer_a_2 = buffer_a_2.await.unwrap();
3564 let buffer_b = buffer_b.await.unwrap();
3565 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3566 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3567
3568 // There is only one buffer per path.
3569 let buffer_a_id = buffer_a_1.entity_id();
3570 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3571
3572 // Open the same path again while it is still open.
3573 drop(buffer_a_1);
3574 let buffer_a_3 = project
3575 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3576 .await
3577 .unwrap();
3578
3579 // There's still only one buffer per path.
3580 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3581}
3582
3583#[gpui::test]
3584async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3585 init_test(cx);
3586
3587 let fs = FakeFs::new(cx.executor());
3588 fs.insert_tree(
3589 path!("/dir"),
3590 json!({
3591 "file1": "abc",
3592 "file2": "def",
3593 "file3": "ghi",
3594 }),
3595 )
3596 .await;
3597
3598 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3599
3600 let buffer1 = project
3601 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3602 .await
3603 .unwrap();
3604 let events = Arc::new(Mutex::new(Vec::new()));
3605
3606 // initially, the buffer isn't dirty.
3607 buffer1.update(cx, |buffer, cx| {
3608 cx.subscribe(&buffer1, {
3609 let events = events.clone();
3610 move |_, _, event, _| match event {
3611 BufferEvent::Operation { .. } => {}
3612 _ => events.lock().push(event.clone()),
3613 }
3614 })
3615 .detach();
3616
3617 assert!(!buffer.is_dirty());
3618 assert!(events.lock().is_empty());
3619
3620 buffer.edit([(1..2, "")], None, cx);
3621 });
3622
3623 // after the first edit, the buffer is dirty, and emits a dirtied event.
3624 buffer1.update(cx, |buffer, cx| {
3625 assert!(buffer.text() == "ac");
3626 assert!(buffer.is_dirty());
3627 assert_eq!(
3628 *events.lock(),
3629 &[
3630 language::BufferEvent::Edited,
3631 language::BufferEvent::DirtyChanged
3632 ]
3633 );
3634 events.lock().clear();
3635 buffer.did_save(
3636 buffer.version(),
3637 buffer.file().unwrap().disk_state().mtime(),
3638 cx,
3639 );
3640 });
3641
3642 // after saving, the buffer is not dirty, and emits a saved event.
3643 buffer1.update(cx, |buffer, cx| {
3644 assert!(!buffer.is_dirty());
3645 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
3646 events.lock().clear();
3647
3648 buffer.edit([(1..1, "B")], None, cx);
3649 buffer.edit([(2..2, "D")], None, cx);
3650 });
3651
3652 // after editing again, the buffer is dirty, and emits another dirty event.
3653 buffer1.update(cx, |buffer, cx| {
3654 assert!(buffer.text() == "aBDc");
3655 assert!(buffer.is_dirty());
3656 assert_eq!(
3657 *events.lock(),
3658 &[
3659 language::BufferEvent::Edited,
3660 language::BufferEvent::DirtyChanged,
3661 language::BufferEvent::Edited,
3662 ],
3663 );
3664 events.lock().clear();
3665
3666 // After restoring the buffer to its previously-saved state,
3667 // the buffer is not considered dirty anymore.
3668 buffer.edit([(1..3, "")], None, cx);
3669 assert!(buffer.text() == "ac");
3670 assert!(!buffer.is_dirty());
3671 });
3672
3673 assert_eq!(
3674 *events.lock(),
3675 &[
3676 language::BufferEvent::Edited,
3677 language::BufferEvent::DirtyChanged
3678 ]
3679 );
3680
3681 // When a file is deleted, the buffer is considered dirty.
3682 let events = Arc::new(Mutex::new(Vec::new()));
3683 let buffer2 = project
3684 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
3685 .await
3686 .unwrap();
3687 buffer2.update(cx, |_, cx| {
3688 cx.subscribe(&buffer2, {
3689 let events = events.clone();
3690 move |_, _, event, _| events.lock().push(event.clone())
3691 })
3692 .detach();
3693 });
3694
3695 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
3696 .await
3697 .unwrap();
3698 cx.executor().run_until_parked();
3699 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3700 assert_eq!(
3701 *events.lock(),
3702 &[
3703 language::BufferEvent::DirtyChanged,
3704 language::BufferEvent::FileHandleChanged
3705 ]
3706 );
3707
3708 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3709 let events = Arc::new(Mutex::new(Vec::new()));
3710 let buffer3 = project
3711 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
3712 .await
3713 .unwrap();
3714 buffer3.update(cx, |_, cx| {
3715 cx.subscribe(&buffer3, {
3716 let events = events.clone();
3717 move |_, _, event, _| events.lock().push(event.clone())
3718 })
3719 .detach();
3720 });
3721
3722 buffer3.update(cx, |buffer, cx| {
3723 buffer.edit([(0..0, "x")], None, cx);
3724 });
3725 events.lock().clear();
3726 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
3727 .await
3728 .unwrap();
3729 cx.executor().run_until_parked();
3730 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
3731 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3732}
3733
// Verifies how buffers react to on-disk changes: a clean buffer reloads
// and diffs its contents (preserving anchors across the diff), while a
// dirty buffer keeps its edits and is flagged as conflicted instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The ˇ markers record offsets in the initial text so we can later
    // check where anchors land after the on-disk diff is applied.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Anchor before each marked offset, to track positions across reloads.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Anchors moved with the diffed edits, landing on the offsets
        // marked in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
3816
3817#[gpui::test]
3818async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3819 init_test(cx);
3820
3821 let fs = FakeFs::new(cx.executor());
3822 fs.insert_tree(
3823 path!("/dir"),
3824 json!({
3825 "file1": "a\nb\nc\n",
3826 "file2": "one\r\ntwo\r\nthree\r\n",
3827 }),
3828 )
3829 .await;
3830
3831 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3832 let buffer1 = project
3833 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3834 .await
3835 .unwrap();
3836 let buffer2 = project
3837 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
3838 .await
3839 .unwrap();
3840
3841 buffer1.update(cx, |buffer, _| {
3842 assert_eq!(buffer.text(), "a\nb\nc\n");
3843 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3844 });
3845 buffer2.update(cx, |buffer, _| {
3846 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3847 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3848 });
3849
3850 // Change a file's line endings on disk from unix to windows. The buffer's
3851 // state updates correctly.
3852 fs.save(
3853 path!("/dir/file1").as_ref(),
3854 &"aaa\nb\nc\n".into(),
3855 LineEnding::Windows,
3856 )
3857 .await
3858 .unwrap();
3859 cx.executor().run_until_parked();
3860 buffer1.update(cx, |buffer, _| {
3861 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3862 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3863 });
3864
3865 // Save a file with windows line endings. The file is written correctly.
3866 buffer2.update(cx, |buffer, cx| {
3867 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3868 });
3869 project
3870 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3871 .await
3872 .unwrap();
3873 assert_eq!(
3874 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
3875 "one\r\ntwo\r\nthree\r\nfour\r\n",
3876 );
3877}
3878
// Verifies that LSP diagnostics whose `related_information` entries point
// at each other are merged into groups: each group carries one primary
// diagnostic plus its supporting hints, all sharing a `group_id`.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Publish two diagnostic groups in one message:
    // - "error 1" (warning) with one related hint, and
    // - "error 2" (error) with two related hints.
    // The hint diagnostics link back to their primaries via
    // related_information entries labelled "original diagnostic".
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries come back in buffer order; the hints carry the group_id
    // of their primary diagnostic and is_primary: false.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 is "error 2" plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 is "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4121
// Verifies the LSP file-operation protocol around renames: when a file
// matching the server's registered filters is renamed, the server
// receives a `workspace/willRenameFiles` request (whose returned
// WorkspaceEdit is resolved) followed by a `workspace/didRenameFiles`
// notification.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server registers interest in *.rs files and all folders, so the
    // rename below must trigger both file-operation messages.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer so the fake language server starts up.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the resulting future completes only after the
    // willRenameFiles round-trip below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server will return from willRenameFiles; we later check
    // that it was resolved (i.e. handed back to the project for application).
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    // Answer the willRenameFiles request, checking the old/new URIs and
    // recording that the edit was served.
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server is notified via didRenameFiles
    // with the same old/new URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4250
// Verifies symbol renaming via LSP: `prepare_rename` resolves the range of
// the symbol under the cursor, and `perform_rename` applies the server's
// WorkspaceEdit across every affected buffer in one transaction.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the fake server answers
    // with the symbol's range, which the project converts back to offsets.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename. The server's WorkspaceEdit touches both files:
    // the definition in one.rs and two references in two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The returned transaction covers both edited buffers with the renamed
    // text applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4390
4391#[gpui::test]
4392async fn test_search(cx: &mut gpui::TestAppContext) {
4393 init_test(cx);
4394
4395 let fs = FakeFs::new(cx.executor());
4396 fs.insert_tree(
4397 path!("/dir"),
4398 json!({
4399 "one.rs": "const ONE: usize = 1;",
4400 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4401 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4402 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4403 }),
4404 )
4405 .await;
4406 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4407 assert_eq!(
4408 search(
4409 &project,
4410 SearchQuery::text(
4411 "TWO",
4412 false,
4413 true,
4414 false,
4415 Default::default(),
4416 Default::default(),
4417 None
4418 )
4419 .unwrap(),
4420 cx
4421 )
4422 .await
4423 .unwrap(),
4424 HashMap::from_iter([
4425 (separator!("dir/two.rs").to_string(), vec![6..9]),
4426 (separator!("dir/three.rs").to_string(), vec![37..40])
4427 ])
4428 );
4429
4430 let buffer_4 = project
4431 .update(cx, |project, cx| {
4432 project.open_local_buffer(path!("/dir/four.rs"), cx)
4433 })
4434 .await
4435 .unwrap();
4436 buffer_4.update(cx, |buffer, cx| {
4437 let text = "two::TWO";
4438 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4439 });
4440
4441 assert_eq!(
4442 search(
4443 &project,
4444 SearchQuery::text(
4445 "TWO",
4446 false,
4447 true,
4448 false,
4449 Default::default(),
4450 Default::default(),
4451 None,
4452 )
4453 .unwrap(),
4454 cx
4455 )
4456 .await
4457 .unwrap(),
4458 HashMap::from_iter([
4459 (separator!("dir/two.rs").to_string(), vec![6..9]),
4460 (separator!("dir/three.rs").to_string(), vec![37..40]),
4461 (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
4462 ])
4463 );
4464}
4465
4466#[gpui::test]
4467async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4468 init_test(cx);
4469
4470 let search_query = "file";
4471
4472 let fs = FakeFs::new(cx.executor());
4473 fs.insert_tree(
4474 path!("/dir"),
4475 json!({
4476 "one.rs": r#"// Rust file one"#,
4477 "one.ts": r#"// TypeScript file one"#,
4478 "two.rs": r#"// Rust file two"#,
4479 "two.ts": r#"// TypeScript file two"#,
4480 }),
4481 )
4482 .await;
4483 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4484
4485 assert!(
4486 search(
4487 &project,
4488 SearchQuery::text(
4489 search_query,
4490 false,
4491 true,
4492 false,
4493 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4494 Default::default(),
4495 None
4496 )
4497 .unwrap(),
4498 cx
4499 )
4500 .await
4501 .unwrap()
4502 .is_empty(),
4503 "If no inclusions match, no files should be returned"
4504 );
4505
4506 assert_eq!(
4507 search(
4508 &project,
4509 SearchQuery::text(
4510 search_query,
4511 false,
4512 true,
4513 false,
4514 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4515 Default::default(),
4516 None
4517 )
4518 .unwrap(),
4519 cx
4520 )
4521 .await
4522 .unwrap(),
4523 HashMap::from_iter([
4524 (separator!("dir/one.rs").to_string(), vec![8..12]),
4525 (separator!("dir/two.rs").to_string(), vec![8..12]),
4526 ]),
4527 "Rust only search should give only Rust files"
4528 );
4529
4530 assert_eq!(
4531 search(
4532 &project,
4533 SearchQuery::text(
4534 search_query,
4535 false,
4536 true,
4537 false,
4538
4539 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4540
4541 Default::default(),
4542 None,
4543 ).unwrap(),
4544 cx
4545 )
4546 .await
4547 .unwrap(),
4548 HashMap::from_iter([
4549 (separator!("dir/one.ts").to_string(), vec![14..18]),
4550 (separator!("dir/two.ts").to_string(), vec![14..18]),
4551 ]),
4552 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4553 );
4554
4555 assert_eq!(
4556 search(
4557 &project,
4558 SearchQuery::text(
4559 search_query,
4560 false,
4561 true,
4562 false,
4563
4564 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4565
4566 Default::default(),
4567 None,
4568 ).unwrap(),
4569 cx
4570 )
4571 .await
4572 .unwrap(),
4573 HashMap::from_iter([
4574 (separator!("dir/two.ts").to_string(), vec![14..18]),
4575 (separator!("dir/one.rs").to_string(), vec![8..12]),
4576 (separator!("dir/one.ts").to_string(), vec![14..18]),
4577 (separator!("dir/two.rs").to_string(), vec![8..12]),
4578 ]),
4579 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4580 );
4581}
4582
4583#[gpui::test]
4584async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4585 init_test(cx);
4586
4587 let search_query = "file";
4588
4589 let fs = FakeFs::new(cx.executor());
4590 fs.insert_tree(
4591 path!("/dir"),
4592 json!({
4593 "one.rs": r#"// Rust file one"#,
4594 "one.ts": r#"// TypeScript file one"#,
4595 "two.rs": r#"// Rust file two"#,
4596 "two.ts": r#"// TypeScript file two"#,
4597 }),
4598 )
4599 .await;
4600 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4601
4602 assert_eq!(
4603 search(
4604 &project,
4605 SearchQuery::text(
4606 search_query,
4607 false,
4608 true,
4609 false,
4610 Default::default(),
4611 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4612 None,
4613 )
4614 .unwrap(),
4615 cx
4616 )
4617 .await
4618 .unwrap(),
4619 HashMap::from_iter([
4620 (separator!("dir/one.rs").to_string(), vec![8..12]),
4621 (separator!("dir/one.ts").to_string(), vec![14..18]),
4622 (separator!("dir/two.rs").to_string(), vec![8..12]),
4623 (separator!("dir/two.ts").to_string(), vec![14..18]),
4624 ]),
4625 "If no exclusions match, all files should be returned"
4626 );
4627
4628 assert_eq!(
4629 search(
4630 &project,
4631 SearchQuery::text(
4632 search_query,
4633 false,
4634 true,
4635 false,
4636 Default::default(),
4637 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4638 None,
4639 )
4640 .unwrap(),
4641 cx
4642 )
4643 .await
4644 .unwrap(),
4645 HashMap::from_iter([
4646 (separator!("dir/one.ts").to_string(), vec![14..18]),
4647 (separator!("dir/two.ts").to_string(), vec![14..18]),
4648 ]),
4649 "Rust exclusion search should give only TypeScript files"
4650 );
4651
4652 assert_eq!(
4653 search(
4654 &project,
4655 SearchQuery::text(
4656 search_query,
4657 false,
4658 true,
4659 false,
4660 Default::default(),
4661 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4662 None,
4663 ).unwrap(),
4664 cx
4665 )
4666 .await
4667 .unwrap(),
4668 HashMap::from_iter([
4669 (separator!("dir/one.rs").to_string(), vec![8..12]),
4670 (separator!("dir/two.rs").to_string(), vec![8..12]),
4671 ]),
4672 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4673 );
4674
4675 assert!(
4676 search(
4677 &project,
4678 SearchQuery::text(
4679 search_query,
4680 false,
4681 true,
4682 false,
4683 Default::default(),
4684
4685 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4686 None,
4687
4688 ).unwrap(),
4689 cx
4690 )
4691 .await
4692 .unwrap().is_empty(),
4693 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4694 );
4695}
4696
4697#[gpui::test]
4698async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4699 init_test(cx);
4700
4701 let search_query = "file";
4702
4703 let fs = FakeFs::new(cx.executor());
4704 fs.insert_tree(
4705 path!("/dir"),
4706 json!({
4707 "one.rs": r#"// Rust file one"#,
4708 "one.ts": r#"// TypeScript file one"#,
4709 "two.rs": r#"// Rust file two"#,
4710 "two.ts": r#"// TypeScript file two"#,
4711 }),
4712 )
4713 .await;
4714 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4715
4716 assert!(
4717 search(
4718 &project,
4719 SearchQuery::text(
4720 search_query,
4721 false,
4722 true,
4723 false,
4724 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4725 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4726 None,
4727 )
4728 .unwrap(),
4729 cx
4730 )
4731 .await
4732 .unwrap()
4733 .is_empty(),
4734 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4735 );
4736
4737 assert!(
4738 search(
4739 &project,
4740 SearchQuery::text(
4741 search_query,
4742 false,
4743 true,
4744 false,
4745 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4746 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4747 None,
4748 ).unwrap(),
4749 cx
4750 )
4751 .await
4752 .unwrap()
4753 .is_empty(),
4754 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4755 );
4756
4757 assert!(
4758 search(
4759 &project,
4760 SearchQuery::text(
4761 search_query,
4762 false,
4763 true,
4764 false,
4765 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4766 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4767 None,
4768 )
4769 .unwrap(),
4770 cx
4771 )
4772 .await
4773 .unwrap()
4774 .is_empty(),
4775 "Non-matching inclusions and exclusions should not change that."
4776 );
4777
4778 assert_eq!(
4779 search(
4780 &project,
4781 SearchQuery::text(
4782 search_query,
4783 false,
4784 true,
4785 false,
4786 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4787 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4788 None,
4789 )
4790 .unwrap(),
4791 cx
4792 )
4793 .await
4794 .unwrap(),
4795 HashMap::from_iter([
4796 (separator!("dir/one.ts").to_string(), vec![14..18]),
4797 (separator!("dir/two.ts").to_string(), vec![14..18]),
4798 ]),
4799 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4800 );
4801}
4802
4803#[gpui::test]
4804async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4805 init_test(cx);
4806
4807 let fs = FakeFs::new(cx.executor());
4808 fs.insert_tree(
4809 path!("/worktree-a"),
4810 json!({
4811 "haystack.rs": r#"// NEEDLE"#,
4812 "haystack.ts": r#"// NEEDLE"#,
4813 }),
4814 )
4815 .await;
4816 fs.insert_tree(
4817 path!("/worktree-b"),
4818 json!({
4819 "haystack.rs": r#"// NEEDLE"#,
4820 "haystack.ts": r#"// NEEDLE"#,
4821 }),
4822 )
4823 .await;
4824
4825 let project = Project::test(
4826 fs.clone(),
4827 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
4828 cx,
4829 )
4830 .await;
4831
4832 assert_eq!(
4833 search(
4834 &project,
4835 SearchQuery::text(
4836 "NEEDLE",
4837 false,
4838 true,
4839 false,
4840 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4841 Default::default(),
4842 None,
4843 )
4844 .unwrap(),
4845 cx
4846 )
4847 .await
4848 .unwrap(),
4849 HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
4850 "should only return results from included worktree"
4851 );
4852 assert_eq!(
4853 search(
4854 &project,
4855 SearchQuery::text(
4856 "NEEDLE",
4857 false,
4858 true,
4859 false,
4860 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4861 Default::default(),
4862 None,
4863 )
4864 .unwrap(),
4865 cx
4866 )
4867 .await
4868 .unwrap(),
4869 HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
4870 "should only return results from included worktree"
4871 );
4872
4873 assert_eq!(
4874 search(
4875 &project,
4876 SearchQuery::text(
4877 "NEEDLE",
4878 false,
4879 true,
4880 false,
4881 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4882 Default::default(),
4883 None,
4884 )
4885 .unwrap(),
4886 cx
4887 )
4888 .await
4889 .unwrap(),
4890 HashMap::from_iter([
4891 (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
4892 (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
4893 ]),
4894 "should return results from both worktrees"
4895 );
4896}
4897
4898#[gpui::test]
4899async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4900 init_test(cx);
4901
4902 let fs = FakeFs::new(cx.background_executor.clone());
4903 fs.insert_tree(
4904 path!("/dir"),
4905 json!({
4906 ".git": {},
4907 ".gitignore": "**/target\n/node_modules\n",
4908 "target": {
4909 "index.txt": "index_key:index_value"
4910 },
4911 "node_modules": {
4912 "eslint": {
4913 "index.ts": "const eslint_key = 'eslint value'",
4914 "package.json": r#"{ "some_key": "some value" }"#,
4915 },
4916 "prettier": {
4917 "index.ts": "const prettier_key = 'prettier value'",
4918 "package.json": r#"{ "other_key": "other value" }"#,
4919 },
4920 },
4921 "package.json": r#"{ "main_key": "main value" }"#,
4922 }),
4923 )
4924 .await;
4925 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4926
4927 let query = "key";
4928 assert_eq!(
4929 search(
4930 &project,
4931 SearchQuery::text(
4932 query,
4933 false,
4934 false,
4935 false,
4936 Default::default(),
4937 Default::default(),
4938 None,
4939 )
4940 .unwrap(),
4941 cx
4942 )
4943 .await
4944 .unwrap(),
4945 HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
4946 "Only one non-ignored file should have the query"
4947 );
4948
4949 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4950 assert_eq!(
4951 search(
4952 &project,
4953 SearchQuery::text(
4954 query,
4955 false,
4956 false,
4957 true,
4958 Default::default(),
4959 Default::default(),
4960 None,
4961 )
4962 .unwrap(),
4963 cx
4964 )
4965 .await
4966 .unwrap(),
4967 HashMap::from_iter([
4968 (separator!("dir/package.json").to_string(), vec![8..11]),
4969 (separator!("dir/target/index.txt").to_string(), vec![6..9]),
4970 (
4971 separator!("dir/node_modules/prettier/package.json").to_string(),
4972 vec![9..12]
4973 ),
4974 (
4975 separator!("dir/node_modules/prettier/index.ts").to_string(),
4976 vec![15..18]
4977 ),
4978 (
4979 separator!("dir/node_modules/eslint/index.ts").to_string(),
4980 vec![13..16]
4981 ),
4982 (
4983 separator!("dir/node_modules/eslint/package.json").to_string(),
4984 vec![8..11]
4985 ),
4986 ]),
4987 "Unrestricted search with ignored directories should find every file with the query"
4988 );
4989
4990 let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
4991 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
4992 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4993 assert_eq!(
4994 search(
4995 &project,
4996 SearchQuery::text(
4997 query,
4998 false,
4999 false,
5000 true,
5001 files_to_include,
5002 files_to_exclude,
5003 None,
5004 )
5005 .unwrap(),
5006 cx
5007 )
5008 .await
5009 .unwrap(),
5010 HashMap::from_iter([(
5011 separator!("dir/node_modules/prettier/package.json").to_string(),
5012 vec![9..12]
5013 )]),
5014 "With search including ignored prettier directory and excluding TS files, only one file should be found"
5015 );
5016}
5017
5018#[gpui::test]
5019async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5020 init_test(cx);
5021
5022 let fs = FakeFs::new(cx.executor().clone());
5023 fs.insert_tree(
5024 "/one/two",
5025 json!({
5026 "three": {
5027 "a.txt": "",
5028 "four": {}
5029 },
5030 "c.rs": ""
5031 }),
5032 )
5033 .await;
5034
5035 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5036 project
5037 .update(cx, |project, cx| {
5038 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5039 project.create_entry((id, "b.."), true, cx)
5040 })
5041 .await
5042 .unwrap()
5043 .to_included()
5044 .unwrap();
5045
5046 // Can't create paths outside the project
5047 let result = project
5048 .update(cx, |project, cx| {
5049 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5050 project.create_entry((id, "../../boop"), true, cx)
5051 })
5052 .await;
5053 assert!(result.is_err());
5054
5055 // Can't create paths with '..'
5056 let result = project
5057 .update(cx, |project, cx| {
5058 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5059 project.create_entry((id, "four/../beep"), true, cx)
5060 })
5061 .await;
5062 assert!(result.is_err());
5063
5064 assert_eq!(
5065 fs.paths(true),
5066 vec![
5067 PathBuf::from(path!("/")),
5068 PathBuf::from(path!("/one")),
5069 PathBuf::from(path!("/one/two")),
5070 PathBuf::from(path!("/one/two/c.rs")),
5071 PathBuf::from(path!("/one/two/three")),
5072 PathBuf::from(path!("/one/two/three/a.txt")),
5073 PathBuf::from(path!("/one/two/three/b..")),
5074 PathBuf::from(path!("/one/two/three/four")),
5075 ]
5076 );
5077
5078 // And we cannot open buffers with '..'
5079 let result = project
5080 .update(cx, |project, cx| {
5081 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5082 project.open_buffer((id, "../c.rs"), cx)
5083 })
5084 .await;
5085 assert!(result.is_err())
5086}
5087
5088#[gpui::test]
5089async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
5090 init_test(cx);
5091
5092 let fs = FakeFs::new(cx.executor());
5093 fs.insert_tree(
5094 path!("/dir"),
5095 json!({
5096 "a.tsx": "a",
5097 }),
5098 )
5099 .await;
5100
5101 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5102
5103 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5104 language_registry.add(tsx_lang());
5105 let language_server_names = [
5106 "TypeScriptServer",
5107 "TailwindServer",
5108 "ESLintServer",
5109 "NoHoverCapabilitiesServer",
5110 ];
5111 let mut language_servers = [
5112 language_registry.register_fake_lsp(
5113 "tsx",
5114 FakeLspAdapter {
5115 name: language_server_names[0],
5116 capabilities: lsp::ServerCapabilities {
5117 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5118 ..lsp::ServerCapabilities::default()
5119 },
5120 ..FakeLspAdapter::default()
5121 },
5122 ),
5123 language_registry.register_fake_lsp(
5124 "tsx",
5125 FakeLspAdapter {
5126 name: language_server_names[1],
5127 capabilities: lsp::ServerCapabilities {
5128 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5129 ..lsp::ServerCapabilities::default()
5130 },
5131 ..FakeLspAdapter::default()
5132 },
5133 ),
5134 language_registry.register_fake_lsp(
5135 "tsx",
5136 FakeLspAdapter {
5137 name: language_server_names[2],
5138 capabilities: lsp::ServerCapabilities {
5139 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5140 ..lsp::ServerCapabilities::default()
5141 },
5142 ..FakeLspAdapter::default()
5143 },
5144 ),
5145 language_registry.register_fake_lsp(
5146 "tsx",
5147 FakeLspAdapter {
5148 name: language_server_names[3],
5149 capabilities: lsp::ServerCapabilities {
5150 hover_provider: None,
5151 ..lsp::ServerCapabilities::default()
5152 },
5153 ..FakeLspAdapter::default()
5154 },
5155 ),
5156 ];
5157
5158 let (buffer, _handle) = project
5159 .update(cx, |p, cx| {
5160 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5161 })
5162 .await
5163 .unwrap();
5164 cx.executor().run_until_parked();
5165
5166 let mut servers_with_hover_requests = HashMap::default();
5167 for i in 0..language_server_names.len() {
5168 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
5169 panic!(
5170 "Failed to get language server #{i} with name {}",
5171 &language_server_names[i]
5172 )
5173 });
5174 let new_server_name = new_server.server.name();
5175 assert!(
5176 !servers_with_hover_requests.contains_key(&new_server_name),
5177 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5178 );
5179 match new_server_name.as_ref() {
5180 "TailwindServer" | "TypeScriptServer" => {
5181 servers_with_hover_requests.insert(
5182 new_server_name.clone(),
5183 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5184 move |_, _| {
5185 let name = new_server_name.clone();
5186 async move {
5187 Ok(Some(lsp::Hover {
5188 contents: lsp::HoverContents::Scalar(
5189 lsp::MarkedString::String(format!("{name} hover")),
5190 ),
5191 range: None,
5192 }))
5193 }
5194 },
5195 ),
5196 );
5197 }
5198 "ESLintServer" => {
5199 servers_with_hover_requests.insert(
5200 new_server_name,
5201 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5202 |_, _| async move { Ok(None) },
5203 ),
5204 );
5205 }
5206 "NoHoverCapabilitiesServer" => {
5207 let _never_handled = new_server
5208 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
5209 panic!(
5210 "Should not call for hovers server with no corresponding capabilities"
5211 )
5212 });
5213 }
5214 unexpected => panic!("Unexpected server name: {unexpected}"),
5215 }
5216 }
5217
5218 let hover_task = project.update(cx, |project, cx| {
5219 project.hover(&buffer, Point::new(0, 0), cx)
5220 });
5221 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
5222 |mut hover_request| async move {
5223 hover_request
5224 .next()
5225 .await
5226 .expect("All hover requests should have been triggered")
5227 },
5228 ))
5229 .await;
5230 assert_eq!(
5231 vec!["TailwindServer hover", "TypeScriptServer hover"],
5232 hover_task
5233 .await
5234 .into_iter()
5235 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5236 .sorted()
5237 .collect::<Vec<_>>(),
5238 "Should receive hover responses from all related servers with hover capabilities"
5239 );
5240}
5241
5242#[gpui::test]
5243async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5244 init_test(cx);
5245
5246 let fs = FakeFs::new(cx.executor());
5247 fs.insert_tree(
5248 path!("/dir"),
5249 json!({
5250 "a.ts": "a",
5251 }),
5252 )
5253 .await;
5254
5255 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5256
5257 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5258 language_registry.add(typescript_lang());
5259 let mut fake_language_servers = language_registry.register_fake_lsp(
5260 "TypeScript",
5261 FakeLspAdapter {
5262 capabilities: lsp::ServerCapabilities {
5263 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5264 ..lsp::ServerCapabilities::default()
5265 },
5266 ..FakeLspAdapter::default()
5267 },
5268 );
5269
5270 let (buffer, _handle) = project
5271 .update(cx, |p, cx| {
5272 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5273 })
5274 .await
5275 .unwrap();
5276 cx.executor().run_until_parked();
5277
5278 let fake_server = fake_language_servers
5279 .next()
5280 .await
5281 .expect("failed to get the language server");
5282
5283 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
5284 move |_, _| async move {
5285 Ok(Some(lsp::Hover {
5286 contents: lsp::HoverContents::Array(vec![
5287 lsp::MarkedString::String("".to_string()),
5288 lsp::MarkedString::String(" ".to_string()),
5289 lsp::MarkedString::String("\n\n\n".to_string()),
5290 ]),
5291 range: None,
5292 }))
5293 },
5294 );
5295
5296 let hover_task = project.update(cx, |project, cx| {
5297 project.hover(&buffer, Point::new(0, 0), cx)
5298 });
5299 let () = request_handled
5300 .next()
5301 .await
5302 .expect("All hover requests should have been triggered");
5303 assert_eq!(
5304 Vec::<String>::new(),
5305 hover_task
5306 .await
5307 .into_iter()
5308 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5309 .sorted()
5310 .collect::<Vec<_>>(),
5311 "Empty hover parts should be ignored"
5312 );
5313}
5314
5315#[gpui::test]
5316async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5317 init_test(cx);
5318
5319 let fs = FakeFs::new(cx.executor());
5320 fs.insert_tree(
5321 path!("/dir"),
5322 json!({
5323 "a.ts": "a",
5324 }),
5325 )
5326 .await;
5327
5328 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5329
5330 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5331 language_registry.add(typescript_lang());
5332 let mut fake_language_servers = language_registry.register_fake_lsp(
5333 "TypeScript",
5334 FakeLspAdapter {
5335 capabilities: lsp::ServerCapabilities {
5336 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5337 ..lsp::ServerCapabilities::default()
5338 },
5339 ..FakeLspAdapter::default()
5340 },
5341 );
5342
5343 let (buffer, _handle) = project
5344 .update(cx, |p, cx| {
5345 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5346 })
5347 .await
5348 .unwrap();
5349 cx.executor().run_until_parked();
5350
5351 let fake_server = fake_language_servers
5352 .next()
5353 .await
5354 .expect("failed to get the language server");
5355
5356 let mut request_handled = fake_server
5357 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
5358 Ok(Some(vec![
5359 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5360 title: "organize imports".to_string(),
5361 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5362 ..lsp::CodeAction::default()
5363 }),
5364 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5365 title: "fix code".to_string(),
5366 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5367 ..lsp::CodeAction::default()
5368 }),
5369 ]))
5370 });
5371
5372 let code_actions_task = project.update(cx, |project, cx| {
5373 project.code_actions(
5374 &buffer,
5375 0..buffer.read(cx).len(),
5376 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5377 cx,
5378 )
5379 });
5380
5381 let () = request_handled
5382 .next()
5383 .await
5384 .expect("The code action request should have been triggered");
5385
5386 let code_actions = code_actions_task.await.unwrap();
5387 assert_eq!(code_actions.len(), 1);
5388 assert_eq!(
5389 code_actions[0].lsp_action.action_kind(),
5390 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5391 );
5392}
5393
5394#[gpui::test]
5395async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5396 init_test(cx);
5397
5398 let fs = FakeFs::new(cx.executor());
5399 fs.insert_tree(
5400 path!("/dir"),
5401 json!({
5402 "a.tsx": "a",
5403 }),
5404 )
5405 .await;
5406
5407 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5408
5409 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5410 language_registry.add(tsx_lang());
5411 let language_server_names = [
5412 "TypeScriptServer",
5413 "TailwindServer",
5414 "ESLintServer",
5415 "NoActionsCapabilitiesServer",
5416 ];
5417
5418 let mut language_server_rxs = [
5419 language_registry.register_fake_lsp(
5420 "tsx",
5421 FakeLspAdapter {
5422 name: language_server_names[0],
5423 capabilities: lsp::ServerCapabilities {
5424 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5425 ..lsp::ServerCapabilities::default()
5426 },
5427 ..FakeLspAdapter::default()
5428 },
5429 ),
5430 language_registry.register_fake_lsp(
5431 "tsx",
5432 FakeLspAdapter {
5433 name: language_server_names[1],
5434 capabilities: lsp::ServerCapabilities {
5435 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5436 ..lsp::ServerCapabilities::default()
5437 },
5438 ..FakeLspAdapter::default()
5439 },
5440 ),
5441 language_registry.register_fake_lsp(
5442 "tsx",
5443 FakeLspAdapter {
5444 name: language_server_names[2],
5445 capabilities: lsp::ServerCapabilities {
5446 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5447 ..lsp::ServerCapabilities::default()
5448 },
5449 ..FakeLspAdapter::default()
5450 },
5451 ),
5452 language_registry.register_fake_lsp(
5453 "tsx",
5454 FakeLspAdapter {
5455 name: language_server_names[3],
5456 capabilities: lsp::ServerCapabilities {
5457 code_action_provider: None,
5458 ..lsp::ServerCapabilities::default()
5459 },
5460 ..FakeLspAdapter::default()
5461 },
5462 ),
5463 ];
5464
5465 let (buffer, _handle) = project
5466 .update(cx, |p, cx| {
5467 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5468 })
5469 .await
5470 .unwrap();
5471 cx.executor().run_until_parked();
5472
5473 let mut servers_with_actions_requests = HashMap::default();
5474 for i in 0..language_server_names.len() {
5475 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5476 panic!(
5477 "Failed to get language server #{i} with name {}",
5478 &language_server_names[i]
5479 )
5480 });
5481 let new_server_name = new_server.server.name();
5482
5483 assert!(
5484 !servers_with_actions_requests.contains_key(&new_server_name),
5485 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5486 );
5487 match new_server_name.0.as_ref() {
5488 "TailwindServer" | "TypeScriptServer" => {
5489 servers_with_actions_requests.insert(
5490 new_server_name.clone(),
5491 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5492 move |_, _| {
5493 let name = new_server_name.clone();
5494 async move {
5495 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5496 lsp::CodeAction {
5497 title: format!("{name} code action"),
5498 ..lsp::CodeAction::default()
5499 },
5500 )]))
5501 }
5502 },
5503 ),
5504 );
5505 }
5506 "ESLintServer" => {
5507 servers_with_actions_requests.insert(
5508 new_server_name,
5509 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5510 |_, _| async move { Ok(None) },
5511 ),
5512 );
5513 }
5514 "NoActionsCapabilitiesServer" => {
5515 let _never_handled = new_server
5516 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5517 panic!(
5518 "Should not call for code actions server with no corresponding capabilities"
5519 )
5520 });
5521 }
5522 unexpected => panic!("Unexpected server name: {unexpected}"),
5523 }
5524 }
5525
5526 let code_actions_task = project.update(cx, |project, cx| {
5527 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5528 });
5529
5530 // cx.run_until_parked();
5531 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5532 |mut code_actions_request| async move {
5533 code_actions_request
5534 .next()
5535 .await
5536 .expect("All code actions requests should have been triggered")
5537 },
5538 ))
5539 .await;
5540 assert_eq!(
5541 vec!["TailwindServer code action", "TypeScriptServer code action"],
5542 code_actions_task
5543 .await
5544 .unwrap()
5545 .into_iter()
5546 .map(|code_action| code_action.lsp_action.title().to_owned())
5547 .sorted()
5548 .collect::<Vec<_>>(),
5549 "Should receive code actions responses from all related servers with hover capabilities"
5550 );
5551}
5552
5553#[gpui::test]
5554async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5555 init_test(cx);
5556
5557 let fs = FakeFs::new(cx.executor());
5558 fs.insert_tree(
5559 "/dir",
5560 json!({
5561 "a.rs": "let a = 1;",
5562 "b.rs": "let b = 2;",
5563 "c.rs": "let c = 2;",
5564 }),
5565 )
5566 .await;
5567
5568 let project = Project::test(
5569 fs,
5570 [
5571 "/dir/a.rs".as_ref(),
5572 "/dir/b.rs".as_ref(),
5573 "/dir/c.rs".as_ref(),
5574 ],
5575 cx,
5576 )
5577 .await;
5578
5579 // check the initial state and get the worktrees
5580 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5581 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5582 assert_eq!(worktrees.len(), 3);
5583
5584 let worktree_a = worktrees[0].read(cx);
5585 let worktree_b = worktrees[1].read(cx);
5586 let worktree_c = worktrees[2].read(cx);
5587
5588 // check they start in the right order
5589 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5590 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5591 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5592
5593 (
5594 worktrees[0].clone(),
5595 worktrees[1].clone(),
5596 worktrees[2].clone(),
5597 )
5598 });
5599
5600 // move first worktree to after the second
5601 // [a, b, c] -> [b, a, c]
5602 project
5603 .update(cx, |project, cx| {
5604 let first = worktree_a.read(cx);
5605 let second = worktree_b.read(cx);
5606 project.move_worktree(first.id(), second.id(), cx)
5607 })
5608 .expect("moving first after second");
5609
5610 // check the state after moving
5611 project.update(cx, |project, cx| {
5612 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5613 assert_eq!(worktrees.len(), 3);
5614
5615 let first = worktrees[0].read(cx);
5616 let second = worktrees[1].read(cx);
5617 let third = worktrees[2].read(cx);
5618
5619 // check they are now in the right order
5620 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5621 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5622 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5623 });
5624
5625 // move the second worktree to before the first
5626 // [b, a, c] -> [a, b, c]
5627 project
5628 .update(cx, |project, cx| {
5629 let second = worktree_a.read(cx);
5630 let first = worktree_b.read(cx);
5631 project.move_worktree(first.id(), second.id(), cx)
5632 })
5633 .expect("moving second before first");
5634
5635 // check the state after moving
5636 project.update(cx, |project, cx| {
5637 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5638 assert_eq!(worktrees.len(), 3);
5639
5640 let first = worktrees[0].read(cx);
5641 let second = worktrees[1].read(cx);
5642 let third = worktrees[2].read(cx);
5643
5644 // check they are now in the right order
5645 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5646 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5647 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5648 });
5649
5650 // move the second worktree to after the third
5651 // [a, b, c] -> [a, c, b]
5652 project
5653 .update(cx, |project, cx| {
5654 let second = worktree_b.read(cx);
5655 let third = worktree_c.read(cx);
5656 project.move_worktree(second.id(), third.id(), cx)
5657 })
5658 .expect("moving second after third");
5659
5660 // check the state after moving
5661 project.update(cx, |project, cx| {
5662 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5663 assert_eq!(worktrees.len(), 3);
5664
5665 let first = worktrees[0].read(cx);
5666 let second = worktrees[1].read(cx);
5667 let third = worktrees[2].read(cx);
5668
5669 // check they are now in the right order
5670 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5671 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5672 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5673 });
5674
5675 // move the third worktree to before the second
5676 // [a, c, b] -> [a, b, c]
5677 project
5678 .update(cx, |project, cx| {
5679 let third = worktree_c.read(cx);
5680 let second = worktree_b.read(cx);
5681 project.move_worktree(third.id(), second.id(), cx)
5682 })
5683 .expect("moving third before second");
5684
5685 // check the state after moving
5686 project.update(cx, |project, cx| {
5687 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5688 assert_eq!(worktrees.len(), 3);
5689
5690 let first = worktrees[0].read(cx);
5691 let second = worktrees[1].read(cx);
5692 let third = worktrees[2].read(cx);
5693
5694 // check they are now in the right order
5695 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5696 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5697 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5698 });
5699
5700 // move the first worktree to after the third
5701 // [a, b, c] -> [b, c, a]
5702 project
5703 .update(cx, |project, cx| {
5704 let first = worktree_a.read(cx);
5705 let third = worktree_c.read(cx);
5706 project.move_worktree(first.id(), third.id(), cx)
5707 })
5708 .expect("moving first after third");
5709
5710 // check the state after moving
5711 project.update(cx, |project, cx| {
5712 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5713 assert_eq!(worktrees.len(), 3);
5714
5715 let first = worktrees[0].read(cx);
5716 let second = worktrees[1].read(cx);
5717 let third = worktrees[2].read(cx);
5718
5719 // check they are now in the right order
5720 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5721 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5722 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5723 });
5724
5725 // move the third worktree to before the first
5726 // [b, c, a] -> [a, b, c]
5727 project
5728 .update(cx, |project, cx| {
5729 let third = worktree_a.read(cx);
5730 let first = worktree_b.read(cx);
5731 project.move_worktree(third.id(), first.id(), cx)
5732 })
5733 .expect("moving third before first");
5734
5735 // check the state after moving
5736 project.update(cx, |project, cx| {
5737 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5738 assert_eq!(worktrees.len(), 3);
5739
5740 let first = worktrees[0].read(cx);
5741 let second = worktrees[1].read(cx);
5742 let third = worktrees[2].read(cx);
5743
5744 // check they are now in the right order
5745 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5746 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5747 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5748 });
5749}
5750
5751#[gpui::test]
5752async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
5753 init_test(cx);
5754
5755 let staged_contents = r#"
5756 fn main() {
5757 println!("hello world");
5758 }
5759 "#
5760 .unindent();
5761 let file_contents = r#"
5762 // print goodbye
5763 fn main() {
5764 println!("goodbye world");
5765 }
5766 "#
5767 .unindent();
5768
5769 let fs = FakeFs::new(cx.background_executor.clone());
5770 fs.insert_tree(
5771 "/dir",
5772 json!({
5773 ".git": {},
5774 "src": {
5775 "main.rs": file_contents,
5776 }
5777 }),
5778 )
5779 .await;
5780
5781 fs.set_index_for_repo(
5782 Path::new("/dir/.git"),
5783 &[("src/main.rs".into(), staged_contents)],
5784 );
5785
5786 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
5787
5788 let buffer = project
5789 .update(cx, |project, cx| {
5790 project.open_local_buffer("/dir/src/main.rs", cx)
5791 })
5792 .await
5793 .unwrap();
5794 let unstaged_diff = project
5795 .update(cx, |project, cx| {
5796 project.open_unstaged_diff(buffer.clone(), cx)
5797 })
5798 .await
5799 .unwrap();
5800
5801 cx.run_until_parked();
5802 unstaged_diff.update(cx, |unstaged_diff, cx| {
5803 let snapshot = buffer.read(cx).snapshot();
5804 assert_hunks(
5805 unstaged_diff.hunks(&snapshot, cx),
5806 &snapshot,
5807 &unstaged_diff.base_text_string().unwrap(),
5808 &[
5809 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
5810 (
5811 2..3,
5812 " println!(\"hello world\");\n",
5813 " println!(\"goodbye world\");\n",
5814 DiffHunkStatus::modified_none(),
5815 ),
5816 ],
5817 );
5818 });
5819
5820 let staged_contents = r#"
5821 // print goodbye
5822 fn main() {
5823 }
5824 "#
5825 .unindent();
5826
5827 fs.set_index_for_repo(
5828 Path::new("/dir/.git"),
5829 &[("src/main.rs".into(), staged_contents)],
5830 );
5831
5832 cx.run_until_parked();
5833 unstaged_diff.update(cx, |unstaged_diff, cx| {
5834 let snapshot = buffer.read(cx).snapshot();
5835 assert_hunks(
5836 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
5837 &snapshot,
5838 &unstaged_diff.base_text().text(),
5839 &[(
5840 2..3,
5841 "",
5842 " println!(\"goodbye world\");\n",
5843 DiffHunkStatus::added_none(),
5844 )],
5845 );
5846 });
5847}
5848
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of modification.rs: HEAD, the index, and the working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // deletion.rs exists in HEAD and the index but is absent on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should have picked up the registered Rust language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment line is unstaged (absent from the index), while the
    // println modification is already staged (no secondary hunk).
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The entire file appears as a deletion that has not yet been staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file by writing an index that no longer
    // contains deletion.rs.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk no longer has a secondary (unstaged) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6026
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Committed text has six lines; the working copy deletes "zero" and
    // modifies "two" and "four", producing three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's events so their ordering can be asserted below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The staged hunk is immediately marked pending, before the index
        // write completes.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It still appears as pending, since the failure has
    // not surfaced yet.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6366
// NOTE(review): the explicit seeds presumably pin a scheduling order that
// previously exposed a race — confirm before changing them.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same three-hunk shape as above: the working copy deletes "zero" and
    // modifies "two" and "four".
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events so index writes are not observed until flushed below.
    fs.pause_events();

    // Stage the first hunk. It is marked pending immediately.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks remain pending: no FS event has been delivered yet.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6559
6560#[gpui::test]
6561async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
6562 use DiffHunkSecondaryStatus::*;
6563 init_test(cx);
6564
6565 let different_lines = (0..500)
6566 .step_by(5)
6567 .map(|i| format!("diff {}\n", i))
6568 .collect::<Vec<String>>();
6569 let committed_contents = (0..500).map(|i| format!("{}\n", i)).collect::<String>();
6570 let file_contents = (0..500)
6571 .map(|i| {
6572 if i % 5 == 0 {
6573 different_lines[i / 5].clone()
6574 } else {
6575 format!("{}\n", i)
6576 }
6577 })
6578 .collect::<String>();
6579
6580 let fs = FakeFs::new(cx.background_executor.clone());
6581 fs.insert_tree(
6582 "/dir",
6583 json!({
6584 ".git": {},
6585 "file.txt": file_contents.clone()
6586 }),
6587 )
6588 .await;
6589
6590 fs.set_head_for_repo(
6591 "/dir/.git".as_ref(),
6592 &[("file.txt".into(), committed_contents.clone())],
6593 );
6594 fs.set_index_for_repo(
6595 "/dir/.git".as_ref(),
6596 &[("file.txt".into(), committed_contents.clone())],
6597 );
6598
6599 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6600
6601 let buffer = project
6602 .update(cx, |project, cx| {
6603 project.open_local_buffer("/dir/file.txt", cx)
6604 })
6605 .await
6606 .unwrap();
6607 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6608 let uncommitted_diff = project
6609 .update(cx, |project, cx| {
6610 project.open_uncommitted_diff(buffer.clone(), cx)
6611 })
6612 .await
6613 .unwrap();
6614
6615 let mut expected_hunks: Vec<(Range<u32>, String, String, DiffHunkStatus)> = (0..500)
6616 .step_by(5)
6617 .map(|i| {
6618 (
6619 i as u32..i as u32 + 1,
6620 format!("{}\n", i),
6621 different_lines[i / 5].clone(),
6622 DiffHunkStatus::modified(HasSecondaryHunk),
6623 )
6624 })
6625 .collect();
6626
6627 // The hunks are initially unstaged
6628 uncommitted_diff.read_with(cx, |diff, cx| {
6629 assert_hunks(
6630 diff.hunks(&snapshot, cx),
6631 &snapshot,
6632 &diff.base_text_string().unwrap(),
6633 &expected_hunks,
6634 );
6635 });
6636
6637 for (_, _, _, status) in expected_hunks.iter_mut() {
6638 *status = DiffHunkStatus::modified(SecondaryHunkRemovalPending);
6639 }
6640
6641 // Stage every hunk with a different call
6642 uncommitted_diff.update(cx, |diff, cx| {
6643 let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
6644 for hunk in hunks {
6645 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
6646 }
6647
6648 assert_hunks(
6649 diff.hunks(&snapshot, cx),
6650 &snapshot,
6651 &diff.base_text_string().unwrap(),
6652 &expected_hunks,
6653 );
6654 });
6655
6656 // If we wait, we'll have no pending hunks
6657 cx.run_until_parked();
6658 for (_, _, _, status) in expected_hunks.iter_mut() {
6659 *status = DiffHunkStatus::modified(NoSecondaryHunk);
6660 }
6661
6662 uncommitted_diff.update(cx, |diff, cx| {
6663 assert_hunks(
6664 diff.hunks(&snapshot, cx),
6665 &snapshot,
6666 &diff.base_text_string().unwrap(),
6667 &expected_hunks,
6668 );
6669 });
6670
6671 for (_, _, _, status) in expected_hunks.iter_mut() {
6672 *status = DiffHunkStatus::modified(SecondaryHunkAdditionPending);
6673 }
6674
6675 // Unstage every hunk with a different call
6676 uncommitted_diff.update(cx, |diff, cx| {
6677 let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
6678 for hunk in hunks {
6679 diff.stage_or_unstage_hunks(false, &[hunk], &snapshot, true, cx);
6680 }
6681
6682 assert_hunks(
6683 diff.hunks(&snapshot, cx),
6684 &snapshot,
6685 &diff.base_text_string().unwrap(),
6686 &expected_hunks,
6687 );
6688 });
6689
6690 // If we wait, we'll have no pending hunks, again
6691 cx.run_until_parked();
6692 for (_, _, _, status) in expected_hunks.iter_mut() {
6693 *status = DiffHunkStatus::modified(HasSecondaryHunk);
6694 }
6695
6696 uncommitted_diff.update(cx, |diff, cx| {
6697 assert_hunks(
6698 diff.hunks(&snapshot, cx),
6699 &snapshot,
6700 &diff.base_text_string().unwrap(),
6701 &expected_hunks,
6702 );
6703 });
6704}
6705
6706#[gpui::test]
6707async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
6708 init_test(cx);
6709
6710 let committed_contents = r#"
6711 fn main() {
6712 println!("hello from HEAD");
6713 }
6714 "#
6715 .unindent();
6716 let file_contents = r#"
6717 fn main() {
6718 println!("hello from the working copy");
6719 }
6720 "#
6721 .unindent();
6722
6723 let fs = FakeFs::new(cx.background_executor.clone());
6724 fs.insert_tree(
6725 "/dir",
6726 json!({
6727 ".git": {},
6728 "src": {
6729 "main.rs": file_contents,
6730 }
6731 }),
6732 )
6733 .await;
6734
6735 fs.set_head_for_repo(
6736 Path::new("/dir/.git"),
6737 &[("src/main.rs".into(), committed_contents.clone())],
6738 );
6739 fs.set_index_for_repo(
6740 Path::new("/dir/.git"),
6741 &[("src/main.rs".into(), committed_contents.clone())],
6742 );
6743
6744 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
6745
6746 let buffer = project
6747 .update(cx, |project, cx| {
6748 project.open_local_buffer("/dir/src/main.rs", cx)
6749 })
6750 .await
6751 .unwrap();
6752 let uncommitted_diff = project
6753 .update(cx, |project, cx| {
6754 project.open_uncommitted_diff(buffer.clone(), cx)
6755 })
6756 .await
6757 .unwrap();
6758
6759 cx.run_until_parked();
6760 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
6761 let snapshot = buffer.read(cx).snapshot();
6762 assert_hunks(
6763 uncommitted_diff.hunks(&snapshot, cx),
6764 &snapshot,
6765 &uncommitted_diff.base_text_string().unwrap(),
6766 &[(
6767 1..2,
6768 " println!(\"hello from HEAD\");\n",
6769 " println!(\"hello from the working copy\");\n",
6770 DiffHunkStatus {
6771 kind: DiffHunkStatusKind::Modified,
6772 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
6773 },
6774 )],
6775 );
6776 });
6777}
6778
6779#[gpui::test]
6780async fn test_repository_and_path_for_project_path(
6781 background_executor: BackgroundExecutor,
6782 cx: &mut gpui::TestAppContext,
6783) {
6784 init_test(cx);
6785 let fs = FakeFs::new(background_executor);
6786 fs.insert_tree(
6787 path!("/root"),
6788 json!({
6789 "c.txt": "",
6790 "dir1": {
6791 ".git": {},
6792 "deps": {
6793 "dep1": {
6794 ".git": {},
6795 "src": {
6796 "a.txt": ""
6797 }
6798 }
6799 },
6800 "src": {
6801 "b.txt": ""
6802 }
6803 },
6804 }),
6805 )
6806 .await;
6807
6808 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
6809 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
6810 let tree_id = tree.read_with(cx, |tree, _| tree.id());
6811 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
6812 .await;
6813 tree.flush_fs_events(cx).await;
6814
6815 project.read_with(cx, |project, cx| {
6816 let git_store = project.git_store().read(cx);
6817 let pairs = [
6818 ("c.txt", None),
6819 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
6820 (
6821 "dir1/deps/dep1/src/a.txt",
6822 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
6823 ),
6824 ];
6825 let expected = pairs
6826 .iter()
6827 .map(|(path, result)| {
6828 (
6829 path,
6830 result.map(|(repo, repo_path)| {
6831 (Path::new(repo).to_owned(), RepoPath::from(repo_path))
6832 }),
6833 )
6834 })
6835 .collect::<Vec<_>>();
6836 let actual = pairs
6837 .iter()
6838 .map(|(path, _)| {
6839 let project_path = (tree_id, Path::new(path)).into();
6840 let result = maybe!({
6841 let (repo, repo_path) =
6842 git_store.repository_and_path_for_project_path(&project_path, cx)?;
6843 Some((
6844 repo.read(cx)
6845 .repository_entry
6846 .work_directory_abs_path
6847 .clone(),
6848 repo_path,
6849 ))
6850 });
6851 (path, result)
6852 })
6853 .collect::<Vec<_>>();
6854 pretty_assertions::assert_eq!(expected, actual);
6855 });
6856
6857 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
6858 .await
6859 .unwrap();
6860 tree.flush_fs_events(cx).await;
6861
6862 project.read_with(cx, |project, cx| {
6863 let git_store = project.git_store().read(cx);
6864 assert_eq!(
6865 git_store.repository_and_path_for_project_path(
6866 &(tree_id, Path::new("dir1/src/b.txt")).into(),
6867 cx
6868 ),
6869 None
6870 );
6871 });
6872}
6873
6874#[gpui::test]
6875async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
6876 init_test(cx);
6877 let fs = FakeFs::new(cx.background_executor.clone());
6878 fs.insert_tree(
6879 path!("/root"),
6880 json!({
6881 "home": {
6882 ".git": {},
6883 "project": {
6884 "a.txt": "A"
6885 },
6886 },
6887 }),
6888 )
6889 .await;
6890 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
6891
6892 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
6893 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
6894 let tree_id = tree.read_with(cx, |tree, _| tree.id());
6895 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
6896 .await;
6897 tree.flush_fs_events(cx).await;
6898
6899 project.read_with(cx, |project, cx| {
6900 let containing = project
6901 .git_store()
6902 .read(cx)
6903 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
6904 assert!(containing.is_none());
6905 });
6906
6907 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
6908 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
6909 let tree_id = tree.read_with(cx, |tree, _| tree.id());
6910 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
6911 .await;
6912 tree.flush_fs_events(cx).await;
6913
6914 project.read_with(cx, |project, cx| {
6915 let containing = project
6916 .git_store()
6917 .read(cx)
6918 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
6919 assert_eq!(
6920 containing
6921 .unwrap()
6922 .0
6923 .read(cx)
6924 .repository_entry
6925 .work_directory_abs_path,
6926 Path::new(path!("/root/home"))
6927 );
6928 });
6929}
6930
6931async fn search(
6932 project: &Entity<Project>,
6933 query: SearchQuery,
6934 cx: &mut gpui::TestAppContext,
6935) -> Result<HashMap<String, Vec<Range<usize>>>> {
6936 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
6937 let mut results = HashMap::default();
6938 while let Ok(search_result) = search_rx.recv().await {
6939 match search_result {
6940 SearchResult::Buffer { buffer, ranges } => {
6941 results.entry(buffer).or_insert(ranges);
6942 }
6943 SearchResult::LimitReached => {}
6944 }
6945 }
6946 Ok(results
6947 .into_iter()
6948 .map(|(buffer, ranges)| {
6949 buffer.update(cx, |buffer, cx| {
6950 let path = buffer
6951 .file()
6952 .unwrap()
6953 .full_path(cx)
6954 .to_string_lossy()
6955 .to_string();
6956 let ranges = ranges
6957 .into_iter()
6958 .map(|range| range.to_offset(buffer))
6959 .collect::<Vec<_>>();
6960 (path, ranges)
6961 })
6962 })
6963 .collect())
6964}
6965
6966pub fn init_test(cx: &mut gpui::TestAppContext) {
6967 if std::env::var("RUST_LOG").is_ok() {
6968 env_logger::try_init().ok();
6969 }
6970
6971 cx.update(|cx| {
6972 let settings_store = SettingsStore::test(cx);
6973 cx.set_global(settings_store);
6974 release_channel::init(SemanticVersion::default(), cx);
6975 language::init(cx);
6976 Project::init_settings(cx);
6977 });
6978}
6979
6980fn json_lang() -> Arc<Language> {
6981 Arc::new(Language::new(
6982 LanguageConfig {
6983 name: "JSON".into(),
6984 matcher: LanguageMatcher {
6985 path_suffixes: vec!["json".to_string()],
6986 ..Default::default()
6987 },
6988 ..Default::default()
6989 },
6990 None,
6991 ))
6992}
6993
6994fn js_lang() -> Arc<Language> {
6995 Arc::new(Language::new(
6996 LanguageConfig {
6997 name: "JavaScript".into(),
6998 matcher: LanguageMatcher {
6999 path_suffixes: vec!["js".to_string()],
7000 ..Default::default()
7001 },
7002 ..Default::default()
7003 },
7004 None,
7005 ))
7006}
7007
7008fn rust_lang() -> Arc<Language> {
7009 Arc::new(Language::new(
7010 LanguageConfig {
7011 name: "Rust".into(),
7012 matcher: LanguageMatcher {
7013 path_suffixes: vec!["rs".to_string()],
7014 ..Default::default()
7015 },
7016 ..Default::default()
7017 },
7018 Some(tree_sitter_rust::LANGUAGE.into()),
7019 ))
7020}
7021
7022fn typescript_lang() -> Arc<Language> {
7023 Arc::new(Language::new(
7024 LanguageConfig {
7025 name: "TypeScript".into(),
7026 matcher: LanguageMatcher {
7027 path_suffixes: vec!["ts".to_string()],
7028 ..Default::default()
7029 },
7030 ..Default::default()
7031 },
7032 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
7033 ))
7034}
7035
7036fn tsx_lang() -> Arc<Language> {
7037 Arc::new(Language::new(
7038 LanguageConfig {
7039 name: "tsx".into(),
7040 matcher: LanguageMatcher {
7041 path_suffixes: vec!["tsx".to_string()],
7042 ..Default::default()
7043 },
7044 ..Default::default()
7045 },
7046 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
7047 ))
7048}
7049
7050fn get_all_tasks(
7051 project: &Entity<Project>,
7052 task_contexts: &TaskContexts,
7053 cx: &mut App,
7054) -> Vec<(TaskSourceKind, ResolvedTask)> {
7055 let (mut old, new) = project.update(cx, |project, cx| {
7056 project
7057 .task_store
7058 .read(cx)
7059 .task_inventory()
7060 .unwrap()
7061 .read(cx)
7062 .used_and_current_resolved_tasks(task_contexts, cx)
7063 });
7064 old.extend(new);
7065 old
7066}