1#![allow(clippy::format_collect)]
2
3use crate::{task_inventory::TaskContexts, task_store::TaskSettingsLocation, Event, *};
4use buffer_diff::{
5 assert_hunks, BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
6};
7use fs::FakeFs;
8use futures::{future, StreamExt};
9use gpui::{App, SemanticVersion, UpdateGlobal};
10use http_client::Url;
11use language::{
12 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
13 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
14 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
15 OffsetRangeExt, Point, ToPoint,
16};
17use lsp::{
18 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
19 NumberOrString, TextDocumentEdit, WillRenameFiles,
20};
21use parking_lot::Mutex;
22use paths::tasks_file;
23use pretty_assertions::{assert_eq, assert_matches};
24use serde_json::json;
25#[cfg(not(windows))]
26use std::os;
27use std::{str::FromStr, sync::OnceLock};
28
29use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
30use task::{ResolvedTask, TaskContext};
31use unindent::Unindent as _;
32use util::{
33 assert_set_eq, path,
34 paths::PathMatcher,
35 separator,
36 test::{marked_text_offsets, TempTree},
37 uri, TryFutureExt as _,
38};
39
40#[gpui::test]
41async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
42 cx.executor().allow_parking();
43
44 let (tx, mut rx) = futures::channel::mpsc::unbounded();
45 let _thread = std::thread::spawn(move || {
46 #[cfg(not(target_os = "windows"))]
47 std::fs::metadata("/tmp").unwrap();
48 #[cfg(target_os = "windows")]
49 std::fs::metadata("C:/Windows").unwrap();
50 std::thread::sleep(Duration::from_millis(1000));
51 tx.unbounded_send(1).unwrap();
52 });
53 rx.next().await.unwrap();
54}
55
56#[gpui::test]
57async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
58 cx.executor().allow_parking();
59
60 let io_task = smol::unblock(move || {
61 println!("sleeping on thread {:?}", std::thread::current().id());
62 std::thread::sleep(Duration::from_millis(10));
63 1
64 });
65
66 let task = cx.foreground_executor().spawn(async move {
67 io_task.await;
68 });
69
70 task.await;
71}
72
73#[cfg(not(windows))]
74#[gpui::test]
75async fn test_symlinks(cx: &mut gpui::TestAppContext) {
76 init_test(cx);
77 cx.executor().allow_parking();
78
79 let dir = TempTree::new(json!({
80 "root": {
81 "apple": "",
82 "banana": {
83 "carrot": {
84 "date": "",
85 "endive": "",
86 }
87 },
88 "fennel": {
89 "grape": "",
90 }
91 }
92 }));
93
94 let root_link_path = dir.path().join("root_link");
95 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
96 os::unix::fs::symlink(
97 dir.path().join("root/fennel"),
98 dir.path().join("root/finnochio"),
99 )
100 .unwrap();
101
102 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
103
104 project.update(cx, |project, cx| {
105 let tree = project.worktrees(cx).next().unwrap().read(cx);
106 assert_eq!(tree.file_count(), 5);
107 assert_eq!(
108 tree.inode_for_path("fennel/grape"),
109 tree.inode_for_path("finnochio/grape")
110 );
111 });
112}
113
114#[gpui::test]
115async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
116 init_test(cx);
117
118 let dir = TempTree::new(json!({
119 ".editorconfig": r#"
120 root = true
121 [*.rs]
122 indent_style = tab
123 indent_size = 3
124 end_of_line = lf
125 insert_final_newline = true
126 trim_trailing_whitespace = true
127 [*.js]
128 tab_width = 10
129 "#,
130 ".zed": {
131 "settings.json": r#"{
132 "tab_size": 8,
133 "hard_tabs": false,
134 "ensure_final_newline_on_save": false,
135 "remove_trailing_whitespace_on_save": false,
136 "soft_wrap": "editor_width"
137 }"#,
138 },
139 "a.rs": "fn a() {\n A\n}",
140 "b": {
141 ".editorconfig": r#"
142 [*.rs]
143 indent_size = 2
144 "#,
145 "b.rs": "fn b() {\n B\n}",
146 },
147 "c.js": "def c\n C\nend",
148 "README.json": "tabs are better\n",
149 }));
150
151 let path = dir.path();
152 let fs = FakeFs::new(cx.executor());
153 fs.insert_tree_from_real_fs(path, path).await;
154 let project = Project::test(fs, [path], cx).await;
155
156 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
157 language_registry.add(js_lang());
158 language_registry.add(json_lang());
159 language_registry.add(rust_lang());
160
161 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
162
163 cx.executor().run_until_parked();
164
165 cx.update(|cx| {
166 let tree = worktree.read(cx);
167 let settings_for = |path: &str| {
168 let file_entry = tree.entry_for_path(path).unwrap().clone();
169 let file = File::for_entry(file_entry, worktree.clone());
170 let file_language = project
171 .read(cx)
172 .languages()
173 .language_for_file_path(file.path.as_ref());
174 let file_language = cx
175 .background_executor()
176 .block(file_language)
177 .expect("Failed to get file language");
178 let file = file as _;
179 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
180 };
181
182 let settings_a = settings_for("a.rs");
183 let settings_b = settings_for("b/b.rs");
184 let settings_c = settings_for("c.js");
185 let settings_readme = settings_for("README.json");
186
187 // .editorconfig overrides .zed/settings
188 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
189 assert_eq!(settings_a.hard_tabs, true);
190 assert_eq!(settings_a.ensure_final_newline_on_save, true);
191 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
192
193 // .editorconfig in b/ overrides .editorconfig in root
194 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
195
196 // "indent_size" is not set, so "tab_width" is used
197 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
198
199 // README.md should not be affected by .editorconfig's globe "*.rs"
200 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
201 });
202}
203
// Verifies that per-directory .zed settings and tasks are picked up, that a
// nested .zed directory overrides the root one, and that scheduling a task
// plus adding global file-based tasks re-orders and extends the task list.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Root .zed defines tab_size = 8 and one task; b/.zed overrides with
    // tab_size = 2 and its own task.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the settings/tasks files be scanned before querying.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // Source kind for tasks coming from the root-level .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings cascade: a/a.rs sees the root tab_size, b/b.rs sees
            // the nested override.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree tasks resolve; the nested directory's task sorts first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    // The id_base embeds the platform-specific path separator.
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as "recently scheduled" and also register a task
    // from the global (user-level) tasks.json.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                    settings::TaskKind::Script,
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task is promoted to the front; the new global
    // task appears last with its env carried through resolution.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
409
410#[gpui::test]
411async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
412 init_test(cx);
413 TaskStore::init(None);
414
415 let fs = FakeFs::new(cx.executor());
416 fs.insert_tree(
417 path!("/dir"),
418 json!({
419 ".zed": {
420 "tasks.json": r#"[{
421 "label": "test worktree root",
422 "command": "echo $ZED_WORKTREE_ROOT"
423 }]"#,
424 },
425 "a": {
426 "a.rs": "fn a() {\n A\n}"
427 },
428 }),
429 )
430 .await;
431
432 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
433 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
434
435 cx.executor().run_until_parked();
436 let worktree_id = cx.update(|cx| {
437 project.update(cx, |project, cx| {
438 project.worktrees(cx).next().unwrap().read(cx).id()
439 })
440 });
441
442 let active_non_worktree_item_tasks = cx.update(|cx| {
443 get_all_tasks(
444 &project,
445 &TaskContexts {
446 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
447 active_worktree_context: None,
448 other_worktree_contexts: Vec::new(),
449 },
450 cx,
451 )
452 });
453 assert!(
454 active_non_worktree_item_tasks.is_empty(),
455 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
456 );
457
458 let active_worktree_tasks = cx.update(|cx| {
459 get_all_tasks(
460 &project,
461 &TaskContexts {
462 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
463 active_worktree_context: Some((worktree_id, {
464 let mut worktree_context = TaskContext::default();
465 worktree_context
466 .task_variables
467 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
468 worktree_context
469 })),
470 other_worktree_contexts: Vec::new(),
471 },
472 cx,
473 )
474 });
475 assert_eq!(
476 active_worktree_tasks
477 .into_iter()
478 .map(|(source_kind, task)| {
479 let resolved = task.resolved.unwrap();
480 (source_kind, resolved.command)
481 })
482 .collect::<Vec<_>>(),
483 vec![(
484 TaskSourceKind::Worktree {
485 id: worktree_id,
486 directory_in_worktree: PathBuf::from(separator!(".zed")),
487 id_base: if cfg!(windows) {
488 "local worktree tasks from directory \".zed\"".into()
489 } else {
490 "local worktree tasks from directory \".zed\"".into()
491 },
492 },
493 "echo /dir".to_string(),
494 )]
495 );
496}
497
// End-to-end test of language-server lifecycle management: servers start
// lazily, are notified only about buffers of their language, follow renames
// across extensions, and re-open their documents after a restart.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Two fake servers with distinct completion triggers, so buffer
    // configuration can be attributed to the right server below.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    // The toml edit below must not reach the rust server: the next change
    // notification it receives is for test2.rs, not Cargo.toml.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    // A same-extension rename is modeled as close-then-open on the same server.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed one diagnostic so we can verify it gets cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before new instances start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is unspecified, hence the set
    // comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    // Dropping the open-buffer handle triggers the close.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
899
// Verifies workspace/didChangeWatchedFiles handling: registering watchers
// causes ignored directories to be loaded on demand, and only FS mutations
// matching the watch globs are reported to the server.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // "target" is gitignored; its subtrees should stay unloaded until a
    // server explicitly asks to watch inside them.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for counting how many extra directory reads the watcher
    // registration causes below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Record all change notifications, sorted by URI so assertions are
    // independent of delivery order.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering the watcher alone produces no change events, but does
    // trigger directory scans for the newly watched ignored path.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1099
1100#[gpui::test]
1101async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1102 init_test(cx);
1103
1104 let fs = FakeFs::new(cx.executor());
1105 fs.insert_tree(
1106 path!("/dir"),
1107 json!({
1108 "a.rs": "let a = 1;",
1109 "b.rs": "let b = 2;"
1110 }),
1111 )
1112 .await;
1113
1114 let project = Project::test(
1115 fs,
1116 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1117 cx,
1118 )
1119 .await;
1120 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1121
1122 let buffer_a = project
1123 .update(cx, |project, cx| {
1124 project.open_local_buffer(path!("/dir/a.rs"), cx)
1125 })
1126 .await
1127 .unwrap();
1128 let buffer_b = project
1129 .update(cx, |project, cx| {
1130 project.open_local_buffer(path!("/dir/b.rs"), cx)
1131 })
1132 .await
1133 .unwrap();
1134
1135 lsp_store.update(cx, |lsp_store, cx| {
1136 lsp_store
1137 .update_diagnostics(
1138 LanguageServerId(0),
1139 lsp::PublishDiagnosticsParams {
1140 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1141 version: None,
1142 diagnostics: vec![lsp::Diagnostic {
1143 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1144 severity: Some(lsp::DiagnosticSeverity::ERROR),
1145 message: "error 1".to_string(),
1146 ..Default::default()
1147 }],
1148 },
1149 &[],
1150 cx,
1151 )
1152 .unwrap();
1153 lsp_store
1154 .update_diagnostics(
1155 LanguageServerId(0),
1156 lsp::PublishDiagnosticsParams {
1157 uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
1158 version: None,
1159 diagnostics: vec![lsp::Diagnostic {
1160 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1161 severity: Some(DiagnosticSeverity::WARNING),
1162 message: "error 2".to_string(),
1163 ..Default::default()
1164 }],
1165 },
1166 &[],
1167 cx,
1168 )
1169 .unwrap();
1170 });
1171
1172 buffer_a.update(cx, |buffer, _| {
1173 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1174 assert_eq!(
1175 chunks
1176 .iter()
1177 .map(|(s, d)| (s.as_str(), *d))
1178 .collect::<Vec<_>>(),
1179 &[
1180 ("let ", None),
1181 ("a", Some(DiagnosticSeverity::ERROR)),
1182 (" = 1;", None),
1183 ]
1184 );
1185 });
1186 buffer_b.update(cx, |buffer, _| {
1187 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1188 assert_eq!(
1189 chunks
1190 .iter()
1191 .map(|(s, d)| (s.as_str(), *d))
1192 .collect::<Vec<_>>(),
1193 &[
1194 ("let ", None),
1195 ("b", Some(DiagnosticSeverity::WARNING)),
1196 (" = 2;", None),
1197 ]
1198 );
1199 });
1200}
1201
1202#[gpui::test]
1203async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1204 init_test(cx);
1205
1206 let fs = FakeFs::new(cx.executor());
1207 fs.insert_tree(
1208 path!("/root"),
1209 json!({
1210 "dir": {
1211 ".git": {
1212 "HEAD": "ref: refs/heads/main",
1213 },
1214 ".gitignore": "b.rs",
1215 "a.rs": "let a = 1;",
1216 "b.rs": "let b = 2;",
1217 },
1218 "other.rs": "let b = c;"
1219 }),
1220 )
1221 .await;
1222
1223 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1224 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1225 let (worktree, _) = project
1226 .update(cx, |project, cx| {
1227 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1228 })
1229 .await
1230 .unwrap();
1231 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1232
1233 let (worktree, _) = project
1234 .update(cx, |project, cx| {
1235 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1236 })
1237 .await
1238 .unwrap();
1239 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1240
1241 let server_id = LanguageServerId(0);
1242 lsp_store.update(cx, |lsp_store, cx| {
1243 lsp_store
1244 .update_diagnostics(
1245 server_id,
1246 lsp::PublishDiagnosticsParams {
1247 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1248 version: None,
1249 diagnostics: vec![lsp::Diagnostic {
1250 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1251 severity: Some(lsp::DiagnosticSeverity::ERROR),
1252 message: "unused variable 'b'".to_string(),
1253 ..Default::default()
1254 }],
1255 },
1256 &[],
1257 cx,
1258 )
1259 .unwrap();
1260 lsp_store
1261 .update_diagnostics(
1262 server_id,
1263 lsp::PublishDiagnosticsParams {
1264 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1265 version: None,
1266 diagnostics: vec![lsp::Diagnostic {
1267 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1268 severity: Some(lsp::DiagnosticSeverity::ERROR),
1269 message: "unknown variable 'c'".to_string(),
1270 ..Default::default()
1271 }],
1272 },
1273 &[],
1274 cx,
1275 )
1276 .unwrap();
1277 });
1278
1279 let main_ignored_buffer = project
1280 .update(cx, |project, cx| {
1281 project.open_buffer((main_worktree_id, "b.rs"), cx)
1282 })
1283 .await
1284 .unwrap();
1285 main_ignored_buffer.update(cx, |buffer, _| {
1286 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1287 assert_eq!(
1288 chunks
1289 .iter()
1290 .map(|(s, d)| (s.as_str(), *d))
1291 .collect::<Vec<_>>(),
1292 &[
1293 ("let ", None),
1294 ("b", Some(DiagnosticSeverity::ERROR)),
1295 (" = 2;", None),
1296 ],
1297 "Gigitnored buffers should still get in-buffer diagnostics",
1298 );
1299 });
1300 let other_buffer = project
1301 .update(cx, |project, cx| {
1302 project.open_buffer((other_worktree_id, ""), cx)
1303 })
1304 .await
1305 .unwrap();
1306 other_buffer.update(cx, |buffer, _| {
1307 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1308 assert_eq!(
1309 chunks
1310 .iter()
1311 .map(|(s, d)| (s.as_str(), *d))
1312 .collect::<Vec<_>>(),
1313 &[
1314 ("let b = ", None),
1315 ("c", Some(DiagnosticSeverity::ERROR)),
1316 (";", None),
1317 ],
1318 "Buffers from hidden projects should still get in-buffer diagnostics"
1319 );
1320 });
1321
1322 project.update(cx, |project, cx| {
1323 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1324 assert_eq!(
1325 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1326 vec![(
1327 ProjectPath {
1328 worktree_id: main_worktree_id,
1329 path: Arc::from(Path::new("b.rs")),
1330 },
1331 server_id,
1332 DiagnosticSummary {
1333 error_count: 1,
1334 warning_count: 0,
1335 }
1336 )]
1337 );
1338 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1339 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1340 });
1341}
1342
// Verifies that a server's $/progress notifications using the adapter's
// disk-based-diagnostics token are surfaced as
// DiskBasedDiagnosticsStarted/Finished project events, and that republishing
// identical (empty) diagnostics does not emit a redundant DiagnosticsUpdated
// event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The fake adapter declares `progress_token` as the token that marks
    // disk-based diagnostic runs.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Capture project events from this point forward; assertions below rely on
    // their exact order.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the disk-based token maps to
    // DiskBasedDiagnosticsStarted.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish a diagnostic while the disk-based run is in progress.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the tracked progress emits DiskBasedDiagnosticsFinished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // The published diagnostic is visible in the buffer once it is opened.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical (empty) publish must not produce another event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1478
// Verifies that restarting a language server while its disk-based diagnostics
// task is still running doesn't leave the project stuck in a "diagnostics
// running" state: the replacement server's progress lifecycle fully supersedes
// the old server's never-completed one.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The replacement server gets the next server id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1565
// Verifies that diagnostics published by a language server are cleared — both
// from the buffer and from the project-wide summary — when that server is
// restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic shows up in the buffer and in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1646
1647#[gpui::test]
1648async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1649 init_test(cx);
1650
1651 let fs = FakeFs::new(cx.executor());
1652 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1653
1654 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1655 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1656
1657 language_registry.add(rust_lang());
1658 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1659
1660 let (buffer, _handle) = project
1661 .update(cx, |project, cx| {
1662 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1663 })
1664 .await
1665 .unwrap();
1666
1667 // Before restarting the server, report diagnostics with an unknown buffer version.
1668 let fake_server = fake_servers.next().await.unwrap();
1669 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1670 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1671 version: Some(10000),
1672 diagnostics: Vec::new(),
1673 });
1674 cx.executor().run_until_parked();
1675 project.update(cx, |project, cx| {
1676 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1677 });
1678
1679 let mut fake_server = fake_servers.next().await.unwrap();
1680 let notification = fake_server
1681 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1682 .await
1683 .text_document;
1684 assert_eq!(notification.version, 0);
1685}
1686
// Verifies that cancelling language-server work for a buffer sends a
// WorkDoneProgressCancel notification only for progress tokens that were
// begun as cancellable.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // Begin one non-cancellable progress ("another-token") that must NOT be
    // cancelled below...
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // ...and one cancellable progress that should be.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token's work is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1751
// Verifies that toggling the per-language `enable_language_server` setting
// stops and restarts only the affected language's server, leaving servers for
// other languages untouched.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the matching language's server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server is told to exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance comes up and re-opens the buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JS server is shut down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1869
// Verifies that diagnostics published against an older buffer version are
// translated through the edits made since that version: ranges move with the
// text, overlapping diagnostics highlight correctly, and out-of-order version
// reports are handled.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (Two newlines were prepended, so lines 1 and 2 are now lines 3 and 4.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the higher-severity (error)
        // highlight wins for those characters.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2155
2156#[gpui::test]
2157async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2158 init_test(cx);
2159
2160 let text = concat!(
2161 "let one = ;\n", //
2162 "let two = \n",
2163 "let three = 3;\n",
2164 );
2165
2166 let fs = FakeFs::new(cx.executor());
2167 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2168
2169 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2170 let buffer = project
2171 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2172 .await
2173 .unwrap();
2174
2175 project.update(cx, |project, cx| {
2176 project.lsp_store.update(cx, |lsp_store, cx| {
2177 lsp_store
2178 .update_diagnostic_entries(
2179 LanguageServerId(0),
2180 PathBuf::from("/dir/a.rs"),
2181 None,
2182 vec![
2183 DiagnosticEntry {
2184 range: Unclipped(PointUtf16::new(0, 10))
2185 ..Unclipped(PointUtf16::new(0, 10)),
2186 diagnostic: Diagnostic {
2187 severity: DiagnosticSeverity::ERROR,
2188 message: "syntax error 1".to_string(),
2189 ..Default::default()
2190 },
2191 },
2192 DiagnosticEntry {
2193 range: Unclipped(PointUtf16::new(1, 10))
2194 ..Unclipped(PointUtf16::new(1, 10)),
2195 diagnostic: Diagnostic {
2196 severity: DiagnosticSeverity::ERROR,
2197 message: "syntax error 2".to_string(),
2198 ..Default::default()
2199 },
2200 },
2201 ],
2202 cx,
2203 )
2204 .unwrap();
2205 })
2206 });
2207
2208 // An empty range is extended forward to include the following character.
2209 // At the end of a line, an empty range is extended backward to include
2210 // the preceding character.
2211 buffer.update(cx, |buffer, _| {
2212 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2213 assert_eq!(
2214 chunks
2215 .iter()
2216 .map(|(s, d)| (s.as_str(), *d))
2217 .collect::<Vec<_>>(),
2218 &[
2219 ("let one = ", None),
2220 (";", Some(DiagnosticSeverity::ERROR)),
2221 ("\nlet two =", None),
2222 (" ", Some(DiagnosticSeverity::ERROR)),
2223 ("\nlet three = 3;\n", None)
2224 ]
2225 );
2226 });
2227}
2228
2229#[gpui::test]
2230async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2231 init_test(cx);
2232
2233 let fs = FakeFs::new(cx.executor());
2234 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2235 .await;
2236
2237 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2238 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2239
2240 lsp_store.update(cx, |lsp_store, cx| {
2241 lsp_store
2242 .update_diagnostic_entries(
2243 LanguageServerId(0),
2244 Path::new("/dir/a.rs").to_owned(),
2245 None,
2246 vec![DiagnosticEntry {
2247 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2248 diagnostic: Diagnostic {
2249 severity: DiagnosticSeverity::ERROR,
2250 is_primary: true,
2251 message: "syntax error a1".to_string(),
2252 ..Default::default()
2253 },
2254 }],
2255 cx,
2256 )
2257 .unwrap();
2258 lsp_store
2259 .update_diagnostic_entries(
2260 LanguageServerId(1),
2261 Path::new("/dir/a.rs").to_owned(),
2262 None,
2263 vec![DiagnosticEntry {
2264 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2265 diagnostic: Diagnostic {
2266 severity: DiagnosticSeverity::ERROR,
2267 is_primary: true,
2268 message: "syntax error b1".to_string(),
2269 ..Default::default()
2270 },
2271 }],
2272 cx,
2273 )
2274 .unwrap();
2275
2276 assert_eq!(
2277 lsp_store.diagnostic_summary(false, cx),
2278 DiagnosticSummary {
2279 error_count: 2,
2280 warning_count: 0,
2281 }
2282 );
2283 });
2284}
2285
// Verifies that edits a language server computed against an older document
// version are correctly transformed onto the buffer's current contents
// after intervening user edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    // Record the document version the server was told about when the buffer
    // was opened; the edits below will be issued against this version.
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Interpret the server's edits. Their positions refer to the old
    // document version, so they must be mapped through the user's edits
    // made above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits preserves both the server's changes and
    // the user's concurrent edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2440
// Verifies that a large, diff-shaped set of LSP edits that actually encodes
// a small change (as rust-analyzer produces for its merge-imports action)
// is minimized down to the equivalent small edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four raw edits above collapse into just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2551
// Verifies that malformed LSP edits — unsorted, with inverted ranges, or
// with positions past the end of the file — are normalized into valid,
// ordered, in-bounds buffer edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start comes after end.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The edits come back sorted, de-inverted, and clipped to the
        // buffer's actual extent.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2658
2659fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2660 buffer: &Buffer,
2661 range: Range<T>,
2662) -> Vec<(String, Option<DiagnosticSeverity>)> {
2663 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2664 for chunk in buffer.snapshot().chunks(range, true) {
2665 if chunks.last().map_or(false, |prev_chunk| {
2666 prev_chunk.1 == chunk.diagnostic_severity
2667 }) {
2668 chunks.last_mut().unwrap().0.push_str(chunk.text);
2669 } else {
2670 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2671 }
2672 }
2673 chunks
2674}
2675
// Verifies go-to-definition across files: the target file is opened in a
// new invisible worktree, and that worktree is released once the last
// handle to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the (visible) project worktree.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Point the definition at the `a` in `a.rs`, outside the worktree.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    // Offset 22 falls on the `a` of `crate::a()` in b.rs.
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // `a.rs` now lives in an additional, invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition (and with it the target buffer handle)
    // releases the invisible worktree that was created for `a.rs`.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Returns each worktree's absolute path and whether it is visible.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2773
// Verifies completions from a server that returns no explicit edit range:
// the replacement range must be inferred from the word around the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completion requested at the end of the identifier `fqn`.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item has an `insert_text` but no edit range.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the three-character prefix `fqn`.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completion requested inside a string literal, just before
    // the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // No `insert_text` either; the label itself is used as the new text.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers `cmp` but not the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2867
// Verifies that carriage returns in a completion's insert text are
// normalized to plain newlines before the completion is surfaced.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert text contains both a bare `\r` and a `\r\n`.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    assert_eq!(completions.len(), 1);
    // Both `\r` and `\r\n` are normalized to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2930
// Verifies the command-based code action flow: resolving the action yields
// a command instead of edits, executing the command causes the server to
// send back a `workspace/applyEdit`, and those edits become the applied
// project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying the command data).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: prepend "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3071
3072#[gpui::test(iterations = 10)]
3073async fn test_save_file(cx: &mut gpui::TestAppContext) {
3074 init_test(cx);
3075
3076 let fs = FakeFs::new(cx.executor());
3077 fs.insert_tree(
3078 path!("/dir"),
3079 json!({
3080 "file1": "the old contents",
3081 }),
3082 )
3083 .await;
3084
3085 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3086 let buffer = project
3087 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3088 .await
3089 .unwrap();
3090 buffer.update(cx, |buffer, cx| {
3091 assert_eq!(buffer.text(), "the old contents");
3092 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3093 });
3094
3095 project
3096 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3097 .await
3098 .unwrap();
3099
3100 let new_text = fs
3101 .load(Path::new(path!("/dir/file1")))
3102 .await
3103 .unwrap()
3104 .replace("\r\n", "\n");
3105 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3106}
3107
3108#[gpui::test(iterations = 30)]
3109async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3110 init_test(cx);
3111
3112 let fs = FakeFs::new(cx.executor().clone());
3113 fs.insert_tree(
3114 path!("/dir"),
3115 json!({
3116 "file1": "the original contents",
3117 }),
3118 )
3119 .await;
3120
3121 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3122 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3123 let buffer = project
3124 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3125 .await
3126 .unwrap();
3127
3128 // Simulate buffer diffs being slow, so that they don't complete before
3129 // the next file change occurs.
3130 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3131
3132 // Change the buffer's file on disk, and then wait for the file change
3133 // to be detected by the worktree, so that the buffer starts reloading.
3134 fs.save(
3135 path!("/dir/file1").as_ref(),
3136 &"the first contents".into(),
3137 Default::default(),
3138 )
3139 .await
3140 .unwrap();
3141 worktree.next_event(cx).await;
3142
3143 // Change the buffer's file again. Depending on the random seed, the
3144 // previous file change may still be in progress.
3145 fs.save(
3146 path!("/dir/file1").as_ref(),
3147 &"the second contents".into(),
3148 Default::default(),
3149 )
3150 .await
3151 .unwrap();
3152 worktree.next_event(cx).await;
3153
3154 cx.executor().run_until_parked();
3155 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3156 buffer.read_with(cx, |buffer, _| {
3157 assert_eq!(buffer.text(), on_disk_text);
3158 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3159 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3160 });
3161}
3162
3163#[gpui::test(iterations = 30)]
3164async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3165 init_test(cx);
3166
3167 let fs = FakeFs::new(cx.executor().clone());
3168 fs.insert_tree(
3169 path!("/dir"),
3170 json!({
3171 "file1": "the original contents",
3172 }),
3173 )
3174 .await;
3175
3176 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3177 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3178 let buffer = project
3179 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3180 .await
3181 .unwrap();
3182
3183 // Simulate buffer diffs being slow, so that they don't complete before
3184 // the next file change occurs.
3185 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3186
3187 // Change the buffer's file on disk, and then wait for the file change
3188 // to be detected by the worktree, so that the buffer starts reloading.
3189 fs.save(
3190 path!("/dir/file1").as_ref(),
3191 &"the first contents".into(),
3192 Default::default(),
3193 )
3194 .await
3195 .unwrap();
3196 worktree.next_event(cx).await;
3197
3198 cx.executor()
3199 .spawn(cx.executor().simulate_random_delay())
3200 .await;
3201
3202 // Perform a noop edit, causing the buffer's version to increase.
3203 buffer.update(cx, |buffer, cx| {
3204 buffer.edit([(0..0, " ")], None, cx);
3205 buffer.undo(cx);
3206 });
3207
3208 cx.executor().run_until_parked();
3209 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3210 buffer.read_with(cx, |buffer, _| {
3211 let buffer_text = buffer.text();
3212 if buffer_text == on_disk_text {
3213 assert!(
3214 !buffer.is_dirty() && !buffer.has_conflict(),
3215 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3216 );
3217 }
3218 // If the file change occurred while the buffer was processing the first
3219 // change, the buffer will be in a conflicting state.
3220 else {
3221 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3222 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3223 }
3224 });
3225}
3226
3227#[gpui::test]
3228async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3229 init_test(cx);
3230
3231 let fs = FakeFs::new(cx.executor());
3232 fs.insert_tree(
3233 path!("/dir"),
3234 json!({
3235 "file1": "the old contents",
3236 }),
3237 )
3238 .await;
3239
3240 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3241 let buffer = project
3242 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3243 .await
3244 .unwrap();
3245 buffer.update(cx, |buffer, cx| {
3246 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3247 });
3248
3249 project
3250 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3251 .await
3252 .unwrap();
3253
3254 let new_text = fs
3255 .load(Path::new(path!("/dir/file1")))
3256 .await
3257 .unwrap()
3258 .replace("\r\n", "\n");
3259 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3260}
3261
3262#[gpui::test]
3263async fn test_save_as(cx: &mut gpui::TestAppContext) {
3264 init_test(cx);
3265
3266 let fs = FakeFs::new(cx.executor());
3267 fs.insert_tree("/dir", json!({})).await;
3268
3269 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3270
3271 let languages = project.update(cx, |project, _| project.languages().clone());
3272 languages.add(rust_lang());
3273
3274 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3275 buffer.update(cx, |buffer, cx| {
3276 buffer.edit([(0..0, "abc")], None, cx);
3277 assert!(buffer.is_dirty());
3278 assert!(!buffer.has_conflict());
3279 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3280 });
3281 project
3282 .update(cx, |project, cx| {
3283 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3284 let path = ProjectPath {
3285 worktree_id,
3286 path: Arc::from(Path::new("file1.rs")),
3287 };
3288 project.save_buffer_as(buffer.clone(), path, cx)
3289 })
3290 .await
3291 .unwrap();
3292 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3293
3294 cx.executor().run_until_parked();
3295 buffer.update(cx, |buffer, cx| {
3296 assert_eq!(
3297 buffer.file().unwrap().full_path(cx),
3298 Path::new("dir/file1.rs")
3299 );
3300 assert!(!buffer.is_dirty());
3301 assert!(!buffer.has_conflict());
3302 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3303 });
3304
3305 let opened_buffer = project
3306 .update(cx, |project, cx| {
3307 project.open_local_buffer("/dir/file1.rs", cx)
3308 })
3309 .await
3310 .unwrap();
3311 assert_eq!(opened_buffer, buffer);
3312}
3313
// Verifies two things against a *real* file system (hence `allow_parking` and
// `retries = 5` to tolerate fs-event flakiness):
//  1. worktree entry ids and open-buffer paths survive renames/deletes made
//     directly on disk, and
//  2. a remote replica worktree converges to the same paths after applying
//     the streamed update protos captured via `observe_updates`.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Opens a buffer for `path` relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Looks up the worktree entry id currently associated with `path`;
    // entry ids are expected to be stable across renames.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update proto the local worktree emits so they can be
    // replayed into the remote replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the new on-disk layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids survived the renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files across renames; the deleted file's
    // buffer keeps its last-known path but reports `DiskState::Deleted`.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3479
3480#[gpui::test(iterations = 10)]
3481async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3482 init_test(cx);
3483
3484 let fs = FakeFs::new(cx.executor());
3485 fs.insert_tree(
3486 path!("/dir"),
3487 json!({
3488 "a": {
3489 "file1": "",
3490 }
3491 }),
3492 )
3493 .await;
3494
3495 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3496 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3497 let tree_id = tree.update(cx, |tree, _| tree.id());
3498
3499 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3500 project.update(cx, |project, cx| {
3501 let tree = project.worktrees(cx).next().unwrap();
3502 tree.read(cx)
3503 .entry_for_path(path)
3504 .unwrap_or_else(|| panic!("no entry for path {}", path))
3505 .id
3506 })
3507 };
3508
3509 let dir_id = id_for_path("a", cx);
3510 let file_id = id_for_path("a/file1", cx);
3511 let buffer = project
3512 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3513 .await
3514 .unwrap();
3515 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3516
3517 project
3518 .update(cx, |project, cx| {
3519 project.rename_entry(dir_id, Path::new("b"), cx)
3520 })
3521 .unwrap()
3522 .await
3523 .to_included()
3524 .unwrap();
3525 cx.executor().run_until_parked();
3526
3527 assert_eq!(id_for_path("b", cx), dir_id);
3528 assert_eq!(id_for_path("b/file1", cx), file_id);
3529 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3530}
3531
3532#[gpui::test]
3533async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3534 init_test(cx);
3535
3536 let fs = FakeFs::new(cx.executor());
3537 fs.insert_tree(
3538 "/dir",
3539 json!({
3540 "a.txt": "a-contents",
3541 "b.txt": "b-contents",
3542 }),
3543 )
3544 .await;
3545
3546 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3547
3548 // Spawn multiple tasks to open paths, repeating some paths.
3549 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3550 (
3551 p.open_local_buffer("/dir/a.txt", cx),
3552 p.open_local_buffer("/dir/b.txt", cx),
3553 p.open_local_buffer("/dir/a.txt", cx),
3554 )
3555 });
3556
3557 let buffer_a_1 = buffer_a_1.await.unwrap();
3558 let buffer_a_2 = buffer_a_2.await.unwrap();
3559 let buffer_b = buffer_b.await.unwrap();
3560 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3561 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3562
3563 // There is only one buffer per path.
3564 let buffer_a_id = buffer_a_1.entity_id();
3565 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3566
3567 // Open the same path again while it is still open.
3568 drop(buffer_a_1);
3569 let buffer_a_3 = project
3570 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3571 .await
3572 .unwrap();
3573
3574 // There's still only one buffer per path.
3575 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3576}
3577
// Exercises the buffer dirty-state machine and the exact event sequences it
// emits: edit -> dirty, save -> clean, re-edit -> dirty again, reverting to
// the saved text -> clean, and file deletion marking clean (but not already
// dirty) buffers as dirty.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collects every non-Operation buffer event for later assertions.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    // Note: only the first of the two edits flips dirty state, so only one
    // DirtyChanged appears between the two Edited events.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3728
// When a file changes on disk: a clean buffer reloads and its anchors are
// remapped through the old/new-text diff; a dirty buffer keeps its edits and
// is flagged as conflicted instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // `ˇ` markers yield the offsets used to create anchors below.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // The markers in the new contents give the offsets the anchors are
    // expected to land on after the reload.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
3811
3812#[gpui::test]
3813async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3814 init_test(cx);
3815
3816 let fs = FakeFs::new(cx.executor());
3817 fs.insert_tree(
3818 path!("/dir"),
3819 json!({
3820 "file1": "a\nb\nc\n",
3821 "file2": "one\r\ntwo\r\nthree\r\n",
3822 }),
3823 )
3824 .await;
3825
3826 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3827 let buffer1 = project
3828 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3829 .await
3830 .unwrap();
3831 let buffer2 = project
3832 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
3833 .await
3834 .unwrap();
3835
3836 buffer1.update(cx, |buffer, _| {
3837 assert_eq!(buffer.text(), "a\nb\nc\n");
3838 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3839 });
3840 buffer2.update(cx, |buffer, _| {
3841 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3842 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3843 });
3844
3845 // Change a file's line endings on disk from unix to windows. The buffer's
3846 // state updates correctly.
3847 fs.save(
3848 path!("/dir/file1").as_ref(),
3849 &"aaa\nb\nc\n".into(),
3850 LineEnding::Windows,
3851 )
3852 .await
3853 .unwrap();
3854 cx.executor().run_until_parked();
3855 buffer1.update(cx, |buffer, _| {
3856 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3857 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3858 });
3859
3860 // Save a file with windows line endings. The file is written correctly.
3861 buffer2.update(cx, |buffer, cx| {
3862 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3863 });
3864 project
3865 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3866 .await
3867 .unwrap();
3868 assert_eq!(
3869 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
3870 "one\r\ntwo\r\nthree\r\nfour\r\n",
3871 );
3872}
3873
// Publishes LSP diagnostics whose `related_information` entries cross-link a
// warning and an error with their hints, then asserts the buffer groups them:
// each primary diagnostic plus its hints share one `group_id`, and the whole
// group can be retrieved via `diagnostic_group`.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Five diagnostics: "error 1" (warning) with one hint, and "error 2"
    // (error) with two hints. Hints point back at their primary via
    // related_information with the message "original diagnostic".
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by position: "error 2"'s group has id 0,
    // "error 1"'s has id 1, and exactly the primaries have `is_primary`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: "error 2" and its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4116
// When a worktree entry is renamed and the language server registered
// `willRename`/`didRename` file-operation filters, the project must send
// `workspace/willRenameFiles` (applying the returned workspace edit) before
// the rename, and `workspace/didRenameFiles` after it.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters: any *.rs file, or any folder.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; it won't complete until the willRename request
    // handled below responds.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server returns from willRenameFiles; the project is
    // expected to apply it to two/two.rs.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    // Record that the request actually fired.
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must be notified via
    // didRenameFiles with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4245
// Symbol rename through the LSP: `prepare_rename` resolves the symbol's
// range, then `perform_rename` applies the server's multi-file WorkspaceEdit
// and returns a transaction covering both edited buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server answers with
    // the symbol's range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server's WorkspaceEdit touches both one.rs
    // (the definition) and two.rs (two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction contains both buffers with the edits applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4385
// Project-wide text search finds whole-word matches ("TWO") across files on
// disk, and picks up unsaved in-memory edits to an open buffer on a
// subsequent search.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // "TWO" matches the declaration in two.rs and the reference in three.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so it now references two::TWO
    // twice.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same query now also reports the unsaved matches in four.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40]),
            (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
4460
4461#[gpui::test]
4462async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4463 init_test(cx);
4464
4465 let search_query = "file";
4466
4467 let fs = FakeFs::new(cx.executor());
4468 fs.insert_tree(
4469 path!("/dir"),
4470 json!({
4471 "one.rs": r#"// Rust file one"#,
4472 "one.ts": r#"// TypeScript file one"#,
4473 "two.rs": r#"// Rust file two"#,
4474 "two.ts": r#"// TypeScript file two"#,
4475 }),
4476 )
4477 .await;
4478 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4479
4480 assert!(
4481 search(
4482 &project,
4483 SearchQuery::text(
4484 search_query,
4485 false,
4486 true,
4487 false,
4488 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4489 Default::default(),
4490 None
4491 )
4492 .unwrap(),
4493 cx
4494 )
4495 .await
4496 .unwrap()
4497 .is_empty(),
4498 "If no inclusions match, no files should be returned"
4499 );
4500
4501 assert_eq!(
4502 search(
4503 &project,
4504 SearchQuery::text(
4505 search_query,
4506 false,
4507 true,
4508 false,
4509 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4510 Default::default(),
4511 None
4512 )
4513 .unwrap(),
4514 cx
4515 )
4516 .await
4517 .unwrap(),
4518 HashMap::from_iter([
4519 (separator!("dir/one.rs").to_string(), vec![8..12]),
4520 (separator!("dir/two.rs").to_string(), vec![8..12]),
4521 ]),
4522 "Rust only search should give only Rust files"
4523 );
4524
4525 assert_eq!(
4526 search(
4527 &project,
4528 SearchQuery::text(
4529 search_query,
4530 false,
4531 true,
4532 false,
4533
4534 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4535
4536 Default::default(),
4537 None,
4538 ).unwrap(),
4539 cx
4540 )
4541 .await
4542 .unwrap(),
4543 HashMap::from_iter([
4544 (separator!("dir/one.ts").to_string(), vec![14..18]),
4545 (separator!("dir/two.ts").to_string(), vec![14..18]),
4546 ]),
4547 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4548 );
4549
4550 assert_eq!(
4551 search(
4552 &project,
4553 SearchQuery::text(
4554 search_query,
4555 false,
4556 true,
4557 false,
4558
4559 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4560
4561 Default::default(),
4562 None,
4563 ).unwrap(),
4564 cx
4565 )
4566 .await
4567 .unwrap(),
4568 HashMap::from_iter([
4569 (separator!("dir/two.ts").to_string(), vec![14..18]),
4570 (separator!("dir/one.rs").to_string(), vec![8..12]),
4571 (separator!("dir/one.ts").to_string(), vec![14..18]),
4572 (separator!("dir/two.rs").to_string(), vec![8..12]),
4573 ]),
4574 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4575 );
4576}
4577
4578#[gpui::test]
4579async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4580 init_test(cx);
4581
4582 let search_query = "file";
4583
4584 let fs = FakeFs::new(cx.executor());
4585 fs.insert_tree(
4586 path!("/dir"),
4587 json!({
4588 "one.rs": r#"// Rust file one"#,
4589 "one.ts": r#"// TypeScript file one"#,
4590 "two.rs": r#"// Rust file two"#,
4591 "two.ts": r#"// TypeScript file two"#,
4592 }),
4593 )
4594 .await;
4595 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4596
4597 assert_eq!(
4598 search(
4599 &project,
4600 SearchQuery::text(
4601 search_query,
4602 false,
4603 true,
4604 false,
4605 Default::default(),
4606 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4607 None,
4608 )
4609 .unwrap(),
4610 cx
4611 )
4612 .await
4613 .unwrap(),
4614 HashMap::from_iter([
4615 (separator!("dir/one.rs").to_string(), vec![8..12]),
4616 (separator!("dir/one.ts").to_string(), vec![14..18]),
4617 (separator!("dir/two.rs").to_string(), vec![8..12]),
4618 (separator!("dir/two.ts").to_string(), vec![14..18]),
4619 ]),
4620 "If no exclusions match, all files should be returned"
4621 );
4622
4623 assert_eq!(
4624 search(
4625 &project,
4626 SearchQuery::text(
4627 search_query,
4628 false,
4629 true,
4630 false,
4631 Default::default(),
4632 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4633 None,
4634 )
4635 .unwrap(),
4636 cx
4637 )
4638 .await
4639 .unwrap(),
4640 HashMap::from_iter([
4641 (separator!("dir/one.ts").to_string(), vec![14..18]),
4642 (separator!("dir/two.ts").to_string(), vec![14..18]),
4643 ]),
4644 "Rust exclusion search should give only TypeScript files"
4645 );
4646
4647 assert_eq!(
4648 search(
4649 &project,
4650 SearchQuery::text(
4651 search_query,
4652 false,
4653 true,
4654 false,
4655 Default::default(),
4656 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4657 None,
4658 ).unwrap(),
4659 cx
4660 )
4661 .await
4662 .unwrap(),
4663 HashMap::from_iter([
4664 (separator!("dir/one.rs").to_string(), vec![8..12]),
4665 (separator!("dir/two.rs").to_string(), vec![8..12]),
4666 ]),
4667 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4668 );
4669
4670 assert!(
4671 search(
4672 &project,
4673 SearchQuery::text(
4674 search_query,
4675 false,
4676 true,
4677 false,
4678 Default::default(),
4679
4680 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4681 None,
4682
4683 ).unwrap(),
4684 cx
4685 )
4686 .await
4687 .unwrap().is_empty(),
4688 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4689 );
4690}
4691
4692#[gpui::test]
4693async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4694 init_test(cx);
4695
4696 let search_query = "file";
4697
4698 let fs = FakeFs::new(cx.executor());
4699 fs.insert_tree(
4700 path!("/dir"),
4701 json!({
4702 "one.rs": r#"// Rust file one"#,
4703 "one.ts": r#"// TypeScript file one"#,
4704 "two.rs": r#"// Rust file two"#,
4705 "two.ts": r#"// TypeScript file two"#,
4706 }),
4707 )
4708 .await;
4709 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4710
4711 assert!(
4712 search(
4713 &project,
4714 SearchQuery::text(
4715 search_query,
4716 false,
4717 true,
4718 false,
4719 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4720 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4721 None,
4722 )
4723 .unwrap(),
4724 cx
4725 )
4726 .await
4727 .unwrap()
4728 .is_empty(),
4729 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4730 );
4731
4732 assert!(
4733 search(
4734 &project,
4735 SearchQuery::text(
4736 search_query,
4737 false,
4738 true,
4739 false,
4740 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4741 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4742 None,
4743 ).unwrap(),
4744 cx
4745 )
4746 .await
4747 .unwrap()
4748 .is_empty(),
4749 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4750 );
4751
4752 assert!(
4753 search(
4754 &project,
4755 SearchQuery::text(
4756 search_query,
4757 false,
4758 true,
4759 false,
4760 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4761 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4762 None,
4763 )
4764 .unwrap(),
4765 cx
4766 )
4767 .await
4768 .unwrap()
4769 .is_empty(),
4770 "Non-matching inclusions and exclusions should not change that."
4771 );
4772
4773 assert_eq!(
4774 search(
4775 &project,
4776 SearchQuery::text(
4777 search_query,
4778 false,
4779 true,
4780 false,
4781 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4782 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4783 None,
4784 )
4785 .unwrap(),
4786 cx
4787 )
4788 .await
4789 .unwrap(),
4790 HashMap::from_iter([
4791 (separator!("dir/one.ts").to_string(), vec![14..18]),
4792 (separator!("dir/two.ts").to_string(), vec![14..18]),
4793 ]),
4794 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4795 );
4796}
4797
4798#[gpui::test]
4799async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4800 init_test(cx);
4801
4802 let fs = FakeFs::new(cx.executor());
4803 fs.insert_tree(
4804 path!("/worktree-a"),
4805 json!({
4806 "haystack.rs": r#"// NEEDLE"#,
4807 "haystack.ts": r#"// NEEDLE"#,
4808 }),
4809 )
4810 .await;
4811 fs.insert_tree(
4812 path!("/worktree-b"),
4813 json!({
4814 "haystack.rs": r#"// NEEDLE"#,
4815 "haystack.ts": r#"// NEEDLE"#,
4816 }),
4817 )
4818 .await;
4819
4820 let project = Project::test(
4821 fs.clone(),
4822 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
4823 cx,
4824 )
4825 .await;
4826
4827 assert_eq!(
4828 search(
4829 &project,
4830 SearchQuery::text(
4831 "NEEDLE",
4832 false,
4833 true,
4834 false,
4835 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4836 Default::default(),
4837 None,
4838 )
4839 .unwrap(),
4840 cx
4841 )
4842 .await
4843 .unwrap(),
4844 HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
4845 "should only return results from included worktree"
4846 );
4847 assert_eq!(
4848 search(
4849 &project,
4850 SearchQuery::text(
4851 "NEEDLE",
4852 false,
4853 true,
4854 false,
4855 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4856 Default::default(),
4857 None,
4858 )
4859 .unwrap(),
4860 cx
4861 )
4862 .await
4863 .unwrap(),
4864 HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
4865 "should only return results from included worktree"
4866 );
4867
4868 assert_eq!(
4869 search(
4870 &project,
4871 SearchQuery::text(
4872 "NEEDLE",
4873 false,
4874 true,
4875 false,
4876 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4877 Default::default(),
4878 None,
4879 )
4880 .unwrap(),
4881 cx
4882 )
4883 .await
4884 .unwrap(),
4885 HashMap::from_iter([
4886 (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
4887 (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
4888 ]),
4889 "should return results from both worktrees"
4890 );
4891}
4892
4893#[gpui::test]
4894async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4895 init_test(cx);
4896
4897 let fs = FakeFs::new(cx.background_executor.clone());
4898 fs.insert_tree(
4899 path!("/dir"),
4900 json!({
4901 ".git": {},
4902 ".gitignore": "**/target\n/node_modules\n",
4903 "target": {
4904 "index.txt": "index_key:index_value"
4905 },
4906 "node_modules": {
4907 "eslint": {
4908 "index.ts": "const eslint_key = 'eslint value'",
4909 "package.json": r#"{ "some_key": "some value" }"#,
4910 },
4911 "prettier": {
4912 "index.ts": "const prettier_key = 'prettier value'",
4913 "package.json": r#"{ "other_key": "other value" }"#,
4914 },
4915 },
4916 "package.json": r#"{ "main_key": "main value" }"#,
4917 }),
4918 )
4919 .await;
4920 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4921
4922 let query = "key";
4923 assert_eq!(
4924 search(
4925 &project,
4926 SearchQuery::text(
4927 query,
4928 false,
4929 false,
4930 false,
4931 Default::default(),
4932 Default::default(),
4933 None,
4934 )
4935 .unwrap(),
4936 cx
4937 )
4938 .await
4939 .unwrap(),
4940 HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
4941 "Only one non-ignored file should have the query"
4942 );
4943
4944 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4945 assert_eq!(
4946 search(
4947 &project,
4948 SearchQuery::text(
4949 query,
4950 false,
4951 false,
4952 true,
4953 Default::default(),
4954 Default::default(),
4955 None,
4956 )
4957 .unwrap(),
4958 cx
4959 )
4960 .await
4961 .unwrap(),
4962 HashMap::from_iter([
4963 (separator!("dir/package.json").to_string(), vec![8..11]),
4964 (separator!("dir/target/index.txt").to_string(), vec![6..9]),
4965 (
4966 separator!("dir/node_modules/prettier/package.json").to_string(),
4967 vec![9..12]
4968 ),
4969 (
4970 separator!("dir/node_modules/prettier/index.ts").to_string(),
4971 vec![15..18]
4972 ),
4973 (
4974 separator!("dir/node_modules/eslint/index.ts").to_string(),
4975 vec![13..16]
4976 ),
4977 (
4978 separator!("dir/node_modules/eslint/package.json").to_string(),
4979 vec![8..11]
4980 ),
4981 ]),
4982 "Unrestricted search with ignored directories should find every file with the query"
4983 );
4984
4985 let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
4986 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
4987 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4988 assert_eq!(
4989 search(
4990 &project,
4991 SearchQuery::text(
4992 query,
4993 false,
4994 false,
4995 true,
4996 files_to_include,
4997 files_to_exclude,
4998 None,
4999 )
5000 .unwrap(),
5001 cx
5002 )
5003 .await
5004 .unwrap(),
5005 HashMap::from_iter([(
5006 separator!("dir/node_modules/prettier/package.json").to_string(),
5007 vec![9..12]
5008 )]),
5009 "With search including ignored prettier directory and excluding TS files, only one file should be found"
5010 );
5011}
5012
5013#[gpui::test]
5014async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5015 init_test(cx);
5016
5017 let fs = FakeFs::new(cx.executor().clone());
5018 fs.insert_tree(
5019 "/one/two",
5020 json!({
5021 "three": {
5022 "a.txt": "",
5023 "four": {}
5024 },
5025 "c.rs": ""
5026 }),
5027 )
5028 .await;
5029
5030 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5031 project
5032 .update(cx, |project, cx| {
5033 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5034 project.create_entry((id, "b.."), true, cx)
5035 })
5036 .await
5037 .unwrap()
5038 .to_included()
5039 .unwrap();
5040
5041 // Can't create paths outside the project
5042 let result = project
5043 .update(cx, |project, cx| {
5044 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5045 project.create_entry((id, "../../boop"), true, cx)
5046 })
5047 .await;
5048 assert!(result.is_err());
5049
5050 // Can't create paths with '..'
5051 let result = project
5052 .update(cx, |project, cx| {
5053 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5054 project.create_entry((id, "four/../beep"), true, cx)
5055 })
5056 .await;
5057 assert!(result.is_err());
5058
5059 assert_eq!(
5060 fs.paths(true),
5061 vec![
5062 PathBuf::from(path!("/")),
5063 PathBuf::from(path!("/one")),
5064 PathBuf::from(path!("/one/two")),
5065 PathBuf::from(path!("/one/two/c.rs")),
5066 PathBuf::from(path!("/one/two/three")),
5067 PathBuf::from(path!("/one/two/three/a.txt")),
5068 PathBuf::from(path!("/one/two/three/b..")),
5069 PathBuf::from(path!("/one/two/three/four")),
5070 ]
5071 );
5072
5073 // And we cannot open buffers with '..'
5074 let result = project
5075 .update(cx, |project, cx| {
5076 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5077 project.open_buffer((id, "../c.rs"), cx)
5078 })
5079 .await;
5080 assert!(result.is_err())
5081}
5082
5083#[gpui::test]
5084async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
5085 init_test(cx);
5086
5087 let fs = FakeFs::new(cx.executor());
5088 fs.insert_tree(
5089 path!("/dir"),
5090 json!({
5091 "a.tsx": "a",
5092 }),
5093 )
5094 .await;
5095
5096 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5097
5098 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5099 language_registry.add(tsx_lang());
5100 let language_server_names = [
5101 "TypeScriptServer",
5102 "TailwindServer",
5103 "ESLintServer",
5104 "NoHoverCapabilitiesServer",
5105 ];
5106 let mut language_servers = [
5107 language_registry.register_fake_lsp(
5108 "tsx",
5109 FakeLspAdapter {
5110 name: language_server_names[0],
5111 capabilities: lsp::ServerCapabilities {
5112 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5113 ..lsp::ServerCapabilities::default()
5114 },
5115 ..FakeLspAdapter::default()
5116 },
5117 ),
5118 language_registry.register_fake_lsp(
5119 "tsx",
5120 FakeLspAdapter {
5121 name: language_server_names[1],
5122 capabilities: lsp::ServerCapabilities {
5123 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5124 ..lsp::ServerCapabilities::default()
5125 },
5126 ..FakeLspAdapter::default()
5127 },
5128 ),
5129 language_registry.register_fake_lsp(
5130 "tsx",
5131 FakeLspAdapter {
5132 name: language_server_names[2],
5133 capabilities: lsp::ServerCapabilities {
5134 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5135 ..lsp::ServerCapabilities::default()
5136 },
5137 ..FakeLspAdapter::default()
5138 },
5139 ),
5140 language_registry.register_fake_lsp(
5141 "tsx",
5142 FakeLspAdapter {
5143 name: language_server_names[3],
5144 capabilities: lsp::ServerCapabilities {
5145 hover_provider: None,
5146 ..lsp::ServerCapabilities::default()
5147 },
5148 ..FakeLspAdapter::default()
5149 },
5150 ),
5151 ];
5152
5153 let (buffer, _handle) = project
5154 .update(cx, |p, cx| {
5155 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5156 })
5157 .await
5158 .unwrap();
5159 cx.executor().run_until_parked();
5160
5161 let mut servers_with_hover_requests = HashMap::default();
5162 for i in 0..language_server_names.len() {
5163 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
5164 panic!(
5165 "Failed to get language server #{i} with name {}",
5166 &language_server_names[i]
5167 )
5168 });
5169 let new_server_name = new_server.server.name();
5170 assert!(
5171 !servers_with_hover_requests.contains_key(&new_server_name),
5172 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5173 );
5174 match new_server_name.as_ref() {
5175 "TailwindServer" | "TypeScriptServer" => {
5176 servers_with_hover_requests.insert(
5177 new_server_name.clone(),
5178 new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
5179 let name = new_server_name.clone();
5180 async move {
5181 Ok(Some(lsp::Hover {
5182 contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
5183 format!("{name} hover"),
5184 )),
5185 range: None,
5186 }))
5187 }
5188 }),
5189 );
5190 }
5191 "ESLintServer" => {
5192 servers_with_hover_requests.insert(
5193 new_server_name,
5194 new_server.handle_request::<lsp::request::HoverRequest, _, _>(
5195 |_, _| async move { Ok(None) },
5196 ),
5197 );
5198 }
5199 "NoHoverCapabilitiesServer" => {
5200 let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
5201 |_, _| async move {
5202 panic!(
5203 "Should not call for hovers server with no corresponding capabilities"
5204 )
5205 },
5206 );
5207 }
5208 unexpected => panic!("Unexpected server name: {unexpected}"),
5209 }
5210 }
5211
5212 let hover_task = project.update(cx, |project, cx| {
5213 project.hover(&buffer, Point::new(0, 0), cx)
5214 });
5215 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
5216 |mut hover_request| async move {
5217 hover_request
5218 .next()
5219 .await
5220 .expect("All hover requests should have been triggered")
5221 },
5222 ))
5223 .await;
5224 assert_eq!(
5225 vec!["TailwindServer hover", "TypeScriptServer hover"],
5226 hover_task
5227 .await
5228 .into_iter()
5229 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5230 .sorted()
5231 .collect::<Vec<_>>(),
5232 "Should receive hover responses from all related servers with hover capabilities"
5233 );
5234}
5235
5236#[gpui::test]
5237async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5238 init_test(cx);
5239
5240 let fs = FakeFs::new(cx.executor());
5241 fs.insert_tree(
5242 path!("/dir"),
5243 json!({
5244 "a.ts": "a",
5245 }),
5246 )
5247 .await;
5248
5249 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5250
5251 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5252 language_registry.add(typescript_lang());
5253 let mut fake_language_servers = language_registry.register_fake_lsp(
5254 "TypeScript",
5255 FakeLspAdapter {
5256 capabilities: lsp::ServerCapabilities {
5257 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5258 ..lsp::ServerCapabilities::default()
5259 },
5260 ..FakeLspAdapter::default()
5261 },
5262 );
5263
5264 let (buffer, _handle) = project
5265 .update(cx, |p, cx| {
5266 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5267 })
5268 .await
5269 .unwrap();
5270 cx.executor().run_until_parked();
5271
5272 let fake_server = fake_language_servers
5273 .next()
5274 .await
5275 .expect("failed to get the language server");
5276
5277 let mut request_handled =
5278 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
5279 Ok(Some(lsp::Hover {
5280 contents: lsp::HoverContents::Array(vec![
5281 lsp::MarkedString::String("".to_string()),
5282 lsp::MarkedString::String(" ".to_string()),
5283 lsp::MarkedString::String("\n\n\n".to_string()),
5284 ]),
5285 range: None,
5286 }))
5287 });
5288
5289 let hover_task = project.update(cx, |project, cx| {
5290 project.hover(&buffer, Point::new(0, 0), cx)
5291 });
5292 let () = request_handled
5293 .next()
5294 .await
5295 .expect("All hover requests should have been triggered");
5296 assert_eq!(
5297 Vec::<String>::new(),
5298 hover_task
5299 .await
5300 .into_iter()
5301 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5302 .sorted()
5303 .collect::<Vec<_>>(),
5304 "Empty hover parts should be ignored"
5305 );
5306}
5307
5308#[gpui::test]
5309async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5310 init_test(cx);
5311
5312 let fs = FakeFs::new(cx.executor());
5313 fs.insert_tree(
5314 path!("/dir"),
5315 json!({
5316 "a.ts": "a",
5317 }),
5318 )
5319 .await;
5320
5321 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5322
5323 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5324 language_registry.add(typescript_lang());
5325 let mut fake_language_servers = language_registry.register_fake_lsp(
5326 "TypeScript",
5327 FakeLspAdapter {
5328 capabilities: lsp::ServerCapabilities {
5329 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5330 ..lsp::ServerCapabilities::default()
5331 },
5332 ..FakeLspAdapter::default()
5333 },
5334 );
5335
5336 let (buffer, _handle) = project
5337 .update(cx, |p, cx| {
5338 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5339 })
5340 .await
5341 .unwrap();
5342 cx.executor().run_until_parked();
5343
5344 let fake_server = fake_language_servers
5345 .next()
5346 .await
5347 .expect("failed to get the language server");
5348
5349 let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5350 move |_, _| async move {
5351 Ok(Some(vec![
5352 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5353 title: "organize imports".to_string(),
5354 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5355 ..lsp::CodeAction::default()
5356 }),
5357 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5358 title: "fix code".to_string(),
5359 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5360 ..lsp::CodeAction::default()
5361 }),
5362 ]))
5363 },
5364 );
5365
5366 let code_actions_task = project.update(cx, |project, cx| {
5367 project.code_actions(
5368 &buffer,
5369 0..buffer.read(cx).len(),
5370 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5371 cx,
5372 )
5373 });
5374
5375 let () = request_handled
5376 .next()
5377 .await
5378 .expect("The code action request should have been triggered");
5379
5380 let code_actions = code_actions_task.await.unwrap();
5381 assert_eq!(code_actions.len(), 1);
5382 assert_eq!(
5383 code_actions[0].lsp_action.action_kind(),
5384 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5385 );
5386}
5387
5388#[gpui::test]
5389async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5390 init_test(cx);
5391
5392 let fs = FakeFs::new(cx.executor());
5393 fs.insert_tree(
5394 path!("/dir"),
5395 json!({
5396 "a.tsx": "a",
5397 }),
5398 )
5399 .await;
5400
5401 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5402
5403 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5404 language_registry.add(tsx_lang());
5405 let language_server_names = [
5406 "TypeScriptServer",
5407 "TailwindServer",
5408 "ESLintServer",
5409 "NoActionsCapabilitiesServer",
5410 ];
5411
5412 let mut language_server_rxs = [
5413 language_registry.register_fake_lsp(
5414 "tsx",
5415 FakeLspAdapter {
5416 name: language_server_names[0],
5417 capabilities: lsp::ServerCapabilities {
5418 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5419 ..lsp::ServerCapabilities::default()
5420 },
5421 ..FakeLspAdapter::default()
5422 },
5423 ),
5424 language_registry.register_fake_lsp(
5425 "tsx",
5426 FakeLspAdapter {
5427 name: language_server_names[1],
5428 capabilities: lsp::ServerCapabilities {
5429 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5430 ..lsp::ServerCapabilities::default()
5431 },
5432 ..FakeLspAdapter::default()
5433 },
5434 ),
5435 language_registry.register_fake_lsp(
5436 "tsx",
5437 FakeLspAdapter {
5438 name: language_server_names[2],
5439 capabilities: lsp::ServerCapabilities {
5440 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5441 ..lsp::ServerCapabilities::default()
5442 },
5443 ..FakeLspAdapter::default()
5444 },
5445 ),
5446 language_registry.register_fake_lsp(
5447 "tsx",
5448 FakeLspAdapter {
5449 name: language_server_names[3],
5450 capabilities: lsp::ServerCapabilities {
5451 code_action_provider: None,
5452 ..lsp::ServerCapabilities::default()
5453 },
5454 ..FakeLspAdapter::default()
5455 },
5456 ),
5457 ];
5458
5459 let (buffer, _handle) = project
5460 .update(cx, |p, cx| {
5461 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5462 })
5463 .await
5464 .unwrap();
5465 cx.executor().run_until_parked();
5466
5467 let mut servers_with_actions_requests = HashMap::default();
5468 for i in 0..language_server_names.len() {
5469 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5470 panic!(
5471 "Failed to get language server #{i} with name {}",
5472 &language_server_names[i]
5473 )
5474 });
5475 let new_server_name = new_server.server.name();
5476
5477 assert!(
5478 !servers_with_actions_requests.contains_key(&new_server_name),
5479 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5480 );
5481 match new_server_name.0.as_ref() {
5482 "TailwindServer" | "TypeScriptServer" => {
5483 servers_with_actions_requests.insert(
5484 new_server_name.clone(),
5485 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5486 move |_, _| {
5487 let name = new_server_name.clone();
5488 async move {
5489 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5490 lsp::CodeAction {
5491 title: format!("{name} code action"),
5492 ..lsp::CodeAction::default()
5493 },
5494 )]))
5495 }
5496 },
5497 ),
5498 );
5499 }
5500 "ESLintServer" => {
5501 servers_with_actions_requests.insert(
5502 new_server_name,
5503 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5504 |_, _| async move { Ok(None) },
5505 ),
5506 );
5507 }
5508 "NoActionsCapabilitiesServer" => {
5509 let _never_handled = new_server
5510 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5511 panic!(
5512 "Should not call for code actions server with no corresponding capabilities"
5513 )
5514 });
5515 }
5516 unexpected => panic!("Unexpected server name: {unexpected}"),
5517 }
5518 }
5519
5520 let code_actions_task = project.update(cx, |project, cx| {
5521 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5522 });
5523
5524 // cx.run_until_parked();
5525 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5526 |mut code_actions_request| async move {
5527 code_actions_request
5528 .next()
5529 .await
5530 .expect("All code actions requests should have been triggered")
5531 },
5532 ))
5533 .await;
5534 assert_eq!(
5535 vec!["TailwindServer code action", "TypeScriptServer code action"],
5536 code_actions_task
5537 .await
5538 .unwrap()
5539 .into_iter()
5540 .map(|code_action| code_action.lsp_action.title().to_owned())
5541 .sorted()
5542 .collect::<Vec<_>>(),
5543 "Should receive code actions responses from all related servers with hover capabilities"
5544 );
5545}
5546
5547#[gpui::test]
5548async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5549 init_test(cx);
5550
5551 let fs = FakeFs::new(cx.executor());
5552 fs.insert_tree(
5553 "/dir",
5554 json!({
5555 "a.rs": "let a = 1;",
5556 "b.rs": "let b = 2;",
5557 "c.rs": "let c = 2;",
5558 }),
5559 )
5560 .await;
5561
5562 let project = Project::test(
5563 fs,
5564 [
5565 "/dir/a.rs".as_ref(),
5566 "/dir/b.rs".as_ref(),
5567 "/dir/c.rs".as_ref(),
5568 ],
5569 cx,
5570 )
5571 .await;
5572
5573 // check the initial state and get the worktrees
5574 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5575 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5576 assert_eq!(worktrees.len(), 3);
5577
5578 let worktree_a = worktrees[0].read(cx);
5579 let worktree_b = worktrees[1].read(cx);
5580 let worktree_c = worktrees[2].read(cx);
5581
5582 // check they start in the right order
5583 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5584 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5585 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5586
5587 (
5588 worktrees[0].clone(),
5589 worktrees[1].clone(),
5590 worktrees[2].clone(),
5591 )
5592 });
5593
5594 // move first worktree to after the second
5595 // [a, b, c] -> [b, a, c]
5596 project
5597 .update(cx, |project, cx| {
5598 let first = worktree_a.read(cx);
5599 let second = worktree_b.read(cx);
5600 project.move_worktree(first.id(), second.id(), cx)
5601 })
5602 .expect("moving first after second");
5603
5604 // check the state after moving
5605 project.update(cx, |project, cx| {
5606 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5607 assert_eq!(worktrees.len(), 3);
5608
5609 let first = worktrees[0].read(cx);
5610 let second = worktrees[1].read(cx);
5611 let third = worktrees[2].read(cx);
5612
5613 // check they are now in the right order
5614 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5615 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5616 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5617 });
5618
5619 // move the second worktree to before the first
5620 // [b, a, c] -> [a, b, c]
5621 project
5622 .update(cx, |project, cx| {
5623 let second = worktree_a.read(cx);
5624 let first = worktree_b.read(cx);
5625 project.move_worktree(first.id(), second.id(), cx)
5626 })
5627 .expect("moving second before first");
5628
5629 // check the state after moving
5630 project.update(cx, |project, cx| {
5631 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5632 assert_eq!(worktrees.len(), 3);
5633
5634 let first = worktrees[0].read(cx);
5635 let second = worktrees[1].read(cx);
5636 let third = worktrees[2].read(cx);
5637
5638 // check they are now in the right order
5639 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5640 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5641 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5642 });
5643
5644 // move the second worktree to after the third
5645 // [a, b, c] -> [a, c, b]
5646 project
5647 .update(cx, |project, cx| {
5648 let second = worktree_b.read(cx);
5649 let third = worktree_c.read(cx);
5650 project.move_worktree(second.id(), third.id(), cx)
5651 })
5652 .expect("moving second after third");
5653
5654 // check the state after moving
5655 project.update(cx, |project, cx| {
5656 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5657 assert_eq!(worktrees.len(), 3);
5658
5659 let first = worktrees[0].read(cx);
5660 let second = worktrees[1].read(cx);
5661 let third = worktrees[2].read(cx);
5662
5663 // check they are now in the right order
5664 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5665 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5666 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5667 });
5668
5669 // move the third worktree to before the second
5670 // [a, c, b] -> [a, b, c]
5671 project
5672 .update(cx, |project, cx| {
5673 let third = worktree_c.read(cx);
5674 let second = worktree_b.read(cx);
5675 project.move_worktree(third.id(), second.id(), cx)
5676 })
5677 .expect("moving third before second");
5678
5679 // check the state after moving
5680 project.update(cx, |project, cx| {
5681 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5682 assert_eq!(worktrees.len(), 3);
5683
5684 let first = worktrees[0].read(cx);
5685 let second = worktrees[1].read(cx);
5686 let third = worktrees[2].read(cx);
5687
5688 // check they are now in the right order
5689 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5690 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5691 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5692 });
5693
5694 // move the first worktree to after the third
5695 // [a, b, c] -> [b, c, a]
5696 project
5697 .update(cx, |project, cx| {
5698 let first = worktree_a.read(cx);
5699 let third = worktree_c.read(cx);
5700 project.move_worktree(first.id(), third.id(), cx)
5701 })
5702 .expect("moving first after third");
5703
5704 // check the state after moving
5705 project.update(cx, |project, cx| {
5706 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5707 assert_eq!(worktrees.len(), 3);
5708
5709 let first = worktrees[0].read(cx);
5710 let second = worktrees[1].read(cx);
5711 let third = worktrees[2].read(cx);
5712
5713 // check they are now in the right order
5714 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5715 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5716 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5717 });
5718
5719 // move the third worktree to before the first
5720 // [b, c, a] -> [a, b, c]
5721 project
5722 .update(cx, |project, cx| {
5723 let third = worktree_a.read(cx);
5724 let first = worktree_b.read(cx);
5725 project.move_worktree(third.id(), first.id(), cx)
5726 })
5727 .expect("moving third before first");
5728
5729 // check the state after moving
5730 project.update(cx, |project, cx| {
5731 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5732 assert_eq!(worktrees.len(), 3);
5733
5734 let first = worktrees[0].read(cx);
5735 let second = worktrees[1].read(cx);
5736 let third = worktrees[2].read(cx);
5737
5738 // check they are now in the right order
5739 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5740 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5741 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5742 });
5743}
5744
// Verifies the unstaged diff (working copy vs. git index): hunks are computed
// against the index text, and recomputed when the index changes on disk.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Contents of the file as staged in the git index.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Contents of the file on disk (the working copy).
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The buffer differs from the index by one added and one modified hunk.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Change the index: the comment is now staged and the old println line is
    // gone, so only the remaining difference shows up as a single added hunk.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
5842
// Verifies the uncommitted diff (working copy vs. HEAD): the diff's base text
// inherits the buffer's language, each hunk tracks whether it is also staged,
// and a file deleted on disk produces a single deletion hunk whose staged
// status follows the index.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD version of the file.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index version: the println change is staged, the comment is not.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working-copy version.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language (Rust).
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    // The comment is not in the index, so this hunk is unstaged.
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    // The println change matches the index, so it is staged.
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                // The file is still present in the index, so the deletion is unstaged.
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                // Deletion removed from the index: no secondary (unstaged) hunk remains.
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6020
// Exercises staging/unstaging of individual hunks: optimistic "pending" state
// while the index write is in flight, confirmation once the write lands,
// rollback when the index write fails, and two staging operations in flight
// at once. Also checks the BufferDiffEvent stream emitted along the way.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: "zero" deleted, "two" and "four" modified.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    // Index write still in flight: shown as pending removal.
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6360
// Exercises staging hunks while FS events are paused, so a new staging
// operation starts before the event for the previous one arrives. Uses fixed
// seeds that previously exposed ordering bugs in the pending-hunk bookkeeping.
#[gpui::test(iterations = 10, seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: "zero" deleted, "two" and "four" modified.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    // Still pending: its FS event has not been delivered yet.
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6553
// Stages and then unstages 100 hunks with one call per hunk, in quick
// succession, verifying that every hunk shows the correct pending status while
// the index writes are in flight and the correct settled status afterwards.
#[gpui::test]
async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line (0, 5, 10, …) is replaced in the working copy,
    // producing 100 single-line modified hunks.
    let different_lines = (0..500)
        .step_by(5)
        .map(|i| format!("diff {}\n", i))
        .collect::<Vec<String>>();
    let committed_contents = (0..500).map(|i| format!("{}\n", i)).collect::<String>();
    let file_contents = (0..500)
        .map(|i| {
            if i % 5 == 0 {
                different_lines[i / 5].clone()
            } else {
                format!("{}\n", i)
            }
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // One expected single-line modified hunk per replaced line; the status
    // field is rewritten in place as the expected state changes below.
    let mut expected_hunks: Vec<(Range<u32>, String, String, DiffHunkStatus)> = (0..500)
        .step_by(5)
        .map(|i| {
            (
                i as u32..i as u32 + 1,
                format!("{}\n", i),
                different_lines[i / 5].clone(),
                DiffHunkStatus::modified(HasSecondaryHunk),
            )
        })
        .collect();

    // The hunks are initially unstaged
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkRemovalPending);
    }

    // Stage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        }

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(NoSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkAdditionPending);
    }

    // Unstage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(false, &[hunk], &snapshot, true, cx);
        }

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks, again
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(HasSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });
}
6699
// Verifies that uncommitted diffs work when the project's worktree root is a
// single file (rather than the repository's root directory).
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents.clone())],
    );

    // Open the project rooted at the file itself, not the repo directory.
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks(&snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text_string().unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                // Modified relative to HEAD, and not staged (index matches HEAD).
                DiffHunkStatus {
                    kind: DiffHunkStatusKind::Modified,
                    secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
                },
            )],
        );
    });
}
6772
6773async fn search(
6774 project: &Entity<Project>,
6775 query: SearchQuery,
6776 cx: &mut gpui::TestAppContext,
6777) -> Result<HashMap<String, Vec<Range<usize>>>> {
6778 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
6779 let mut results = HashMap::default();
6780 while let Ok(search_result) = search_rx.recv().await {
6781 match search_result {
6782 SearchResult::Buffer { buffer, ranges } => {
6783 results.entry(buffer).or_insert(ranges);
6784 }
6785 SearchResult::LimitReached => {}
6786 }
6787 }
6788 Ok(results
6789 .into_iter()
6790 .map(|(buffer, ranges)| {
6791 buffer.update(cx, |buffer, cx| {
6792 let path = buffer
6793 .file()
6794 .unwrap()
6795 .full_path(cx)
6796 .to_string_lossy()
6797 .to_string();
6798 let ranges = ranges
6799 .into_iter()
6800 .map(|range| range.to_offset(buffer))
6801 .collect::<Vec<_>>();
6802 (path, ranges)
6803 })
6804 })
6805 .collect())
6806}
6807
6808pub fn init_test(cx: &mut gpui::TestAppContext) {
6809 if std::env::var("RUST_LOG").is_ok() {
6810 env_logger::try_init().ok();
6811 }
6812
6813 cx.update(|cx| {
6814 let settings_store = SettingsStore::test(cx);
6815 cx.set_global(settings_store);
6816 release_channel::init(SemanticVersion::default(), cx);
6817 language::init(cx);
6818 Project::init_settings(cx);
6819 });
6820}
6821
6822fn json_lang() -> Arc<Language> {
6823 Arc::new(Language::new(
6824 LanguageConfig {
6825 name: "JSON".into(),
6826 matcher: LanguageMatcher {
6827 path_suffixes: vec!["json".to_string()],
6828 ..Default::default()
6829 },
6830 ..Default::default()
6831 },
6832 None,
6833 ))
6834}
6835
6836fn js_lang() -> Arc<Language> {
6837 Arc::new(Language::new(
6838 LanguageConfig {
6839 name: "JavaScript".into(),
6840 matcher: LanguageMatcher {
6841 path_suffixes: vec!["js".to_string()],
6842 ..Default::default()
6843 },
6844 ..Default::default()
6845 },
6846 None,
6847 ))
6848}
6849
6850fn rust_lang() -> Arc<Language> {
6851 Arc::new(Language::new(
6852 LanguageConfig {
6853 name: "Rust".into(),
6854 matcher: LanguageMatcher {
6855 path_suffixes: vec!["rs".to_string()],
6856 ..Default::default()
6857 },
6858 ..Default::default()
6859 },
6860 Some(tree_sitter_rust::LANGUAGE.into()),
6861 ))
6862}
6863
6864fn typescript_lang() -> Arc<Language> {
6865 Arc::new(Language::new(
6866 LanguageConfig {
6867 name: "TypeScript".into(),
6868 matcher: LanguageMatcher {
6869 path_suffixes: vec!["ts".to_string()],
6870 ..Default::default()
6871 },
6872 ..Default::default()
6873 },
6874 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
6875 ))
6876}
6877
6878fn tsx_lang() -> Arc<Language> {
6879 Arc::new(Language::new(
6880 LanguageConfig {
6881 name: "tsx".into(),
6882 matcher: LanguageMatcher {
6883 path_suffixes: vec!["tsx".to_string()],
6884 ..Default::default()
6885 },
6886 ..Default::default()
6887 },
6888 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
6889 ))
6890}
6891
6892fn get_all_tasks(
6893 project: &Entity<Project>,
6894 task_contexts: &TaskContexts,
6895 cx: &mut App,
6896) -> Vec<(TaskSourceKind, ResolvedTask)> {
6897 let (mut old, new) = project.update(cx, |project, cx| {
6898 project
6899 .task_store
6900 .read(cx)
6901 .task_inventory()
6902 .unwrap()
6903 .read(cx)
6904 .used_and_current_resolved_tasks(task_contexts, cx)
6905 });
6906 old.extend(new);
6907 old
6908}